Migration to storage2

Alexander Nozik 2018-08-15 17:58:11 +03:00
parent 4264d51a61
commit 7ad1ef6484
22 changed files with 83 additions and 359 deletions
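The hunks below repeatedly swap the storage1 entry points for their storage2 counterparts (FileEnvelope -> MutableFileEnvelope, NumassStorageFactory.buildLocal -> NumassDirectory.read). As a minimal before/after sketch of the recurring call-site change, assembled from the hunks in this commit ("Fill_2" is only a placeholder path taken from the scripts below):

import hep.dataforge.context.Global
import inr.numass.data.storage.NumassDirectory

fun main(args: Array<String>) {
    // storage1 (removed): NumassStorageFactory.buildLocal(context, "Fill_2", true, false)
    // storage2 (added): directories are opened through NumassDirectory; the result is nullable
    val storage = NumassDirectory.read(Global, "Fill_2")
            ?: error("storage directory not found")
    println(storage)
}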

View File

@@ -1,14 +1,14 @@
package inr.numass.data.legacy
import hep.dataforge.meta.Meta
import hep.dataforge.storage.files.FileEnvelope
import hep.dataforge.storage.files.MutableFileEnvelope
import inr.numass.NumassEnvelopeType
import java.nio.ByteBuffer
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.StandardOpenOption
class NumassFileEnvelope(path: Path) : FileEnvelope(path) {
class NumassFileEnvelope(path: Path) : MutableFileEnvelope(path) {
private val tag by lazy { Files.newByteChannel(path, StandardOpenOption.READ).use { NumassEnvelopeType.LegacyTag().read(it) } }
@@ -47,7 +47,7 @@ class NumassFileEnvelope(path: Path) : FileEnvelope(path) {
// val LEGACY_START_SEQUENCE = byteArrayOf('#'.toByte(), '!'.toByte())
// val LEGACY_END_SEQUENCE = byteArrayOf('!'.toByte(), '#'.toByte(), '\r'.toByte(), '\n'.toByte())
//
// fun open(path: Path, readOnly: Boolean): FileEnvelope {
// fun open(path: Path, readOnly: Boolean): MutableFileEnvelope {
// // if (!Files.exists(path)) {
// // throw new RuntimeException("File envelope does not exist");
// // }
@@ -58,7 +58,7 @@ class NumassFileEnvelope(path: Path) : FileEnvelope(path) {
// return if (buffer.compareTo(ByteBuffer.wrap(LEGACY_START_SEQUENCE)) == 0) {
// NumassFileEnvelope(path, readOnly)
// } else {
// FileEnvelope.open(path, readOnly)
// MutableFileEnvelope.open(path, readOnly)
// }
// }
// } catch (e: IOException) {

View File

@@ -15,7 +15,7 @@ import kotlin.coroutines.experimental.buildSequence
/**
* Created by darksnake on 03-Feb-17.
*/
class NumassDataFactory : DataFactory<NumassSet>(NumassSet::class.java) {
object NumassDataFactory : DataFactory<NumassSet>(NumassSet::class.java) {
override val name = "numass"

View File

@@ -16,6 +16,7 @@
package inr.numass.data.storage
import hep.dataforge.context.Context
import hep.dataforge.context.Global
import hep.dataforge.events.Event
import hep.dataforge.events.EventBuilder
import hep.dataforge.meta.Meta
@@ -24,7 +25,6 @@ import hep.dataforge.storage.files.FileStorage
import hep.dataforge.storage.files.FileStorageElement
import inr.numass.NumassEnvelopeType
import kotlinx.coroutines.experimental.runBlocking
import java.io.File
import java.nio.file.Files
import java.nio.file.Path
@@ -47,8 +47,11 @@ class NumassDirectory : FileStorage.Directory() {
val INSTANCE = NumassDirectory()
const val NUMASS_DIRECTORY_TYPE = "inr.numass.storage.directory"
fun read(context: Context, path: String): FileStorageElement{
return runBlocking { INSTANCE.read(context, File(path).toPath())!!}
/**
* Simple read for scripting and debug
*/
fun read(context: Context = Global, path: String): FileStorageElement?{
return runBlocking { INSTANCE.read(context, context.getDataFile(path).absolutePath)}
}
}
}
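A usage sketch for the "scripting and debug" helper added above (not part of the commit; the path is a placeholder and, per the implementation, is resolved against the context data directory):

import hep.dataforge.storage.Storage
import inr.numass.data.storage.NumassDirectory
import kotlinx.coroutines.experimental.runBlocking

fun main(args: Array<String>) {
    // The context argument defaults to Global, so scripts may pass only the path.
    // read() returns null when the path does not point to a storage directory.
    val storage = NumassDirectory.read(path = "2018_04/Fill_4") as? Storage
            ?: error("not a Numass storage directory")
    // Child elements are exposed through a suspending API in storage2, hence runBlocking.
    runBlocking {
        storage.getChildren().forEach { println(it) }
    }
}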

View File

@@ -1,84 +0,0 @@
package inr.numass.scripts.temp
import hep.dataforge.context.Context
import hep.dataforge.context.Global
import hep.dataforge.grind.Grind
import hep.dataforge.grind.GrindShell
import hep.dataforge.grind.helpers.PlotHelper
import hep.dataforge.plots.PlotFrame
import hep.dataforge.plots.data.DataPlot
import hep.dataforge.tables.Adapters
import hep.dataforge.tables.ColumnTable
import hep.dataforge.tables.Table
import inr.numass.NumassPlugin
import inr.numass.data.NumassDataUtils
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.analyzers.SmartAnalyzer
import inr.numass.data.api.NumassSet
import inr.numass.data.storage.NumassStorage
import inr.numass.data.storage.NumassStorageFactory
Context ctx = Global.instance()
ctx.getPluginManager().load(NumassPlugin.class)
Table.metaClass.withBinning { int binning ->
return NumassDataUtils.spectrumWithBinning(delegate, binning)
}
Table.metaClass.withDeadTime { double dt = 6.5 ->
double totalCR = delegate.getColumn(NumassAnalyzer.COUNT_RATE_KEY).stream().mapToDouble { it.getDouble() }.sum()
// long totalCount = delegate.getColumn(NumassAnalyzer.COUNT_RATE_KEY).stream().mapToLong() { it.getLong() }.sum()
// double time = totalCount / totalCR
double factor = 1d / (1d - dt * 1e-6 * totalCR)
return ColumnTable.copy(delegate)
.replaceColumn(NumassAnalyzer.COUNT_RATE_KEY) {
it.getDouble(NumassAnalyzer.COUNT_RATE_KEY) * factor
}
.replaceColumn(NumassAnalyzer.COUNT_RATE_ERROR_KEY) {
it.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY) * factor
}
}
new GrindShell(ctx).eval {
File rootDir = new File("D:\\Work\\Numass\\data\\2017_11\\Fill_1")
NumassStorage storage = NumassStorageFactory.buildLocal(rootDir);
NumassSet joined = NumassDataUtils.join("sum", storage.loaders()
.findAll { it instanceof NumassSet }
.collect { it as NumassSet }
)
NumassAnalyzer analyzer = new SmartAnalyzer();
def adapter = Adapters.buildXYAdapter(NumassAnalyzer.CHANNEL_KEY, NumassAnalyzer.COUNT_RATE_KEY, NumassAnalyzer.COUNT_RATE_ERROR_KEY)
def t0 = 15
PlotFrame frame = (plots as PlotHelper).getManager().getPlotFrame("test", "spectra")
frame.plots.setType(DataPlot)
frame.plots.configure(showErrors: false, showSymbol: false, showLine: true, connection: "step")
joined.points.filter { it.voltage in [14000d, 15000d, 16000d, 17000d, 18000d] }.forEach {
//Table spectrum = analyzer.getAmplitudeSpectrum(it, Meta.empty()).withBinning(20).withDeadTime()
Table spectrum = analyzer.getAmplitudeSpectrum(it, Grind.buildMeta(t0: t0*1000)).withBinning(20).withDeadTime(t0)
frame.add(DataPlot.plot(it.voltage.toString(), adapter, spectrum))
}
// def point = joined.points.find { it.voltage == 14000d } as NumassPoint
// PlotFrame pointFrame = (plots as PlotHelper).getManager().getPlotFrame("test", "14000")
//
// pointFrame.plots.setDescriptor(Descriptors.buildDescriptor(DataPlot))
// pointFrame.plots.configure(showErrors: false, showSymbol: false, showLine: true, connection: "step")
//
// [0, 5, 10,15,20].forEach{
// Table spectrum = analyzer.getAmplitudeSpectrum(point, Grind.buildMeta(t0: it*1000)).withBinning(20).withDeadTime(it)
// pointFrame.add(DataPlot.plot(it.toString(), adapter, spectrum))
// }
}

View File

@@ -1,17 +0,0 @@
package inr.numass.scripts.temp
import hep.dataforge.context.Context
import hep.dataforge.grind.Grind
import hep.dataforge.grind.GrindShell
import hep.dataforge.storage.api.Storage
import hep.dataforge.storage.commons.StorageManager
new GrindShell().eval {
def ctx = context as Context;
//(LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME) as Logger).setLevel(Level.INFO)
def storageMeta = Grind.buildMeta(type: "numass", path: "sftp://192.168.111.1/home/trdat/data/2017_11", userName: "trdat", password: "Anomaly")
Storage storage = ctx.load("hep.dataforge:storage", StorageManager).buildStorage(storageMeta);
}

View File

@@ -1,37 +0,0 @@
package inr.numass.scripts.times
import hep.dataforge.context.Context
import hep.dataforge.context.Global
import hep.dataforge.grind.GrindShell
import hep.dataforge.io.ColumnedDataWriter
import hep.dataforge.maths.histogram.SimpleHistogram
import hep.dataforge.meta.Meta
import inr.numass.NumassPlugin
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.NumassSet
import inr.numass.data.storage.NumassStorage
import inr.numass.data.storage.NumassStorageFactory
Context ctx = Global.instance()
ctx.getPluginManager().load(FXPlotManager)
ctx.getPluginManager().load(NumassPlugin)
new GrindShell(ctx).eval {
File rootDir = new File("D:\\Work\\Numass\\data\\2017_05\\Fill_2")
NumassStorage storage = NumassStorageFactory.buildLocal(rootDir);
def set = "set_3"
def hv = 14000
def loader = storage.provide("loader::$set", NumassSet.class).get();
def point = loader.optPoint(hv).get()
def table = new SimpleHistogram([0d, 0d] as Double[], [2d, 100d] as Double[])
.fill(new TimeAnalyzer().getEventsWithDelay(point, Meta.empty()).map {
[it.value / 1000, it.key.amp] as Double[]
}).asTable()
ColumnedDataWriter.writeTable(System.out, table, "hist")
storage.close()
}

View File

@@ -1,61 +0,0 @@
package inr.numass.scripts.times
import hep.dataforge.context.Context
import hep.dataforge.context.Global
import hep.dataforge.grind.Grind
import hep.dataforge.grind.GrindShell
import hep.dataforge.grind.helpers.PlotHelper
import inr.numass.NumassPlugin
import inr.numass.data.PointAnalyzer
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.MetaBlock
import inr.numass.data.api.NumassPoint
import inr.numass.data.storage.NumassDataLoader
import inr.numass.data.storage.NumassStorage
import inr.numass.data.storage.NumassStorageFactory
/**
* Created by darksnake on 06-Jul-17.
*/
Context ctx = Global.instance()
ctx.getPluginManager().load(FXPlotManager)
ctx.getPluginManager().load(NumassPlugin.class)
new GrindShell(ctx).eval {
PlotHelper plot = plots
File rootDir = new File("D:\\Work\\Numass\\data\\2017_05\\Fill_2")
NumassStorage storage = NumassStorageFactory.buildLocal(rootDir);
def pattern = "set_.{1,2}"
List<NumassDataLoader> loaders = storage.loaders().findAll { it.name.matches(pattern) }.collect {
it as NumassDataLoader
}
println "Found ${loaders.size()} loaders matching pattern"
def hv = 16000.toString();
List<NumassPoint> points = loaders.collect { loader -> loader.optPoint(hv).get() }
def loChannel = 400;
def upChannel = 2000;
def chain = new TimeAnalyzer().getEventsWithDelay(new MetaBlock(points), Grind.buildMeta("window.lo": loChannel, "window.up": upChannel)).mapToLong{it.value}
def histogram = PointAnalyzer.histogram(chain, 1, 500).asTable();
println "finished histogram calculation..."
plot.configure("histogram") {
yAxis(type: "log")
}
plot.plot(name: hv, frame: "histogram", showLine: true, showSymbol: false, showErrors: false, connectionType: "step", histogram, {
adapter("x.value": "x", "y.value": "count")
})
storage.close()
}

View File

@@ -1,72 +0,0 @@
package inr.numass.scripts.times
import hep.dataforge.context.Context
import hep.dataforge.context.Global
import hep.dataforge.grind.GrindShell
import hep.dataforge.grind.helpers.PlotHelper
import hep.dataforge.values.ValueMap
import inr.numass.NumassPlugin
import inr.numass.data.PointAnalyzer
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.api.NumassPoint
import inr.numass.data.storage.ProtoNumassPoint
import java.nio.file.Paths
Context ctx = Global.instance()
ctx.getPluginManager().load(FXPlotManager)
ctx.getPluginManager().load(NumassPlugin.class)
new GrindShell(ctx).eval {
PlotHelper plot = plots
//NumassPoint point = ProtoNumassPoint.readFile(Paths.get("D:\\Work\\Numass\\data\\test\\40_kHz_5s.df"))
NumassPoint point = ProtoNumassPoint.readFile(Paths.get("D:\\Work\\Numass\\data\\2017_05_frames\\Fill_3_events\\set_33\\p102(30s)(HV1=14000).df"))
//NumassPoint point = ProtoNumassPoint.readFile(Paths.get("D:\\Work\\Numass\\data\\2017_05_frames\\Fill_3_events\\set_33\\p0(30s)(HV1=16000).df"))
def loChannel = 500;
def upChannel = 10000;
def histogram = PointAnalyzer.histogram(point, loChannel, upChannel, 0.2, 1000).asTable();
println "finished histogram calculation..."
plot.configure("histogram") {
xAxis(axisTitle: "delay", axisUnits: "us")
yAxis(type: "log")
}
plot.plot(name: "test", frame: "histogram", showLine: true, showSymbol: false, showErrors: false, connectionType: "step", histogram, {
adapter("y.value": "count")
})
def trueCR = PointAnalyzer.analyze(point, t0: 30e3, "window.lo": loChannel, "window.up": upChannel).getDouble("cr")
println "The expected count rate for 30 us delay is $trueCR"
def t0 = (1..150).collect { 420 * it }
def statPlotPoints = t0.collect {
def result = PointAnalyzer.analyze(point, t0: it, "window.lo": loChannel, "window.up": upChannel)
ValueMap.ofMap(x: it / 1000, y: result.getDouble("cr"), "y.err": result.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY));
}
plot.plot(name: "total", frame: "stat-method", showLine: true, thickness: 4, statPlotPoints)
// def delta = 5e-6
// def discrepancyPlotPoints = (1..20).collect { delta * it }.collect {
// def t1 = it
// def t2 = it + delta
// def result = PointAnalyzer.count(point, t1, t2, loChannel, upChannel) - (Math.exp(-trueCR * t1) - Math.exp(-trueCR * t2)) * point.length * trueCR
// ValueMap.ofMap("x.value": it + delta / 2, "y.value": result);
// }
//
// frame.frame(name: hv, frame: "discrepancy", discrepancyPlotPoints)
}

View File

@@ -1,16 +0,0 @@
package inr.numass.scripts.workspace
import hep.dataforge.actions.ActionUtils
import hep.dataforge.context.Context
import hep.dataforge.io.OutputManager
import inr.numass.NumassPlugin
/**
* Created by darksnake on 12-Aug-16.
*/
Context context = new Context("numass");
context.loadPlugin(new NumassPlugin());
context.setValue(OutputManager.ROOT_DIRECTORY_CONTEXT_KEY, "D:\\Work\\Numass\\sterile2016");
ActionUtils.runConfig(context, "test.xml").computeAll()

View File

@@ -1,12 +0,0 @@
package inr.numass.scripts.workspace
import hep.dataforge.workspace.FileBasedWorkspace
import java.nio.file.Paths
/**
* Created by darksnake on 11-Aug-16.
*/
FileBasedWorkspace.build(Paths.get("D:/Work/Numass/sterile2016/workspace.groovy")).runTask("numass.prepare", "fill_2").computeAll()

View File

@@ -156,7 +156,7 @@ object NumassUtils {
builder.putData(pointName, point, pointMeta)
}
runBlocking {
set.getHvData().await()?.let { hv -> builder.putData("hv", hv, Meta.empty()) }
set.getHvData()?.let { hv -> builder.putData("hv", hv, Meta.empty()) }
}
return builder.build()
}

View File

@@ -6,8 +6,8 @@
package inr.numass
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.storage.commons.StorageDataFactory
import hep.dataforge.workspace.BasicWorkspace
import inr.numass.data.storage.NumassDataFactory
/**
*
@@ -25,7 +25,7 @@ object WorkspaceTest {
val workspace = BasicWorkspace.builder().apply {
this.context = Numass.buildContext()
data("", StorageDataFactory(), MetaBuilder("storage").putValue("path", storagepath))
data("", NumassDataFactory, MetaBuilder("storage").putValue("path", storagepath))
}.build()
}

View File

@@ -23,6 +23,7 @@ import inr.numass.data.NumassDataUtils
import inr.numass.data.analyzers.SmartAnalyzer
import inr.numass.data.api.NumassEvent
import inr.numass.data.api.NumassSet
import inr.numass.data.storage.NumassDirectory
import org.apache.commons.math3.stat.correlation.PearsonsCorrelation
import java.util.stream.Stream
@@ -46,7 +47,7 @@ fun main(args: Array<String>) {
}
//val rootDir = File("D:\\Work\\Numass\\data\\2017_05\\Fill_2")
val storage = NumassStorageFactory.buildLocal(context, "Fill_2", true, false);
val storage = NumassDirectory.read(context, "Fill_2")!!
val sets = (2..14).map { "set_$it" }

View File

@@ -26,6 +26,7 @@ import inr.numass.data.analyzers.NumassAnalyzer.Companion.AMPLITUDE_ADAPTER
import inr.numass.data.analyzers.SmartAnalyzer
import inr.numass.data.analyzers.withBinning
import inr.numass.data.api.NumassSet
import inr.numass.data.storage.NumassDirectory
import inr.numass.displayChart
@@ -37,7 +38,7 @@ fun main(args: Array<String>) {
}
//val rootDir = File("D:\\Work\\Numass\\data\\2017_05\\Fill_2")
val storage = NumassStorageFactory.buildLocal(context, "Fill_2", true, false);
val storage = NumassDirectory.read(context, "Fill_2")!!
val sets = (10..24).map { "set_$it" }

View File

@@ -28,6 +28,7 @@ import inr.numass.data.analyzers.SmartAnalyzer
import inr.numass.data.analyzers.subtractAmplitudeSpectrum
import inr.numass.data.analyzers.withBinning
import inr.numass.data.api.NumassSet
import inr.numass.data.storage.NumassDirectory
import inr.numass.displayChart
fun main(args: Array<String>) {
@@ -38,7 +39,7 @@ fun main(args: Array<String>) {
}
//val rootDir = File("D:\\Work\\Numass\\data\\2017_05\\Fill_2")
val storage = NumassStorageFactory.buildLocal(context, "Fill_2", true, false);
val storage = NumassDirectory.read(context, "Fill_2")!!
val sets = (1..24).map { "set_$it" }

View File

@@ -23,7 +23,7 @@ fun main(args: Array<String>) {
dataDir = "D:\\Work\\Numass\\data\\2018_04"
}
val storage = NumassDirectory.read(context, "Fill_4");
val storage = NumassDirectory.read(context, "Fill_4")!!
val meta = buildMeta {
"t0" to 3000

View File

@@ -32,7 +32,7 @@ fun main(args: Array<String>) {
dataDir = "D:\\Work\\Numass\\data\\2018_04"
}
val storage = NumassDirectory.read(context, "Fill_4");
val storage = NumassDirectory.read(context, "Fill_4")!!
val meta = buildMeta {
"t0" to 3000

View File

@@ -38,7 +38,7 @@ fun main(args: Array<String>) {
val cr = 3.0
val length = (30000 *1e9).toLong()
val num = 1
val num = 10
val dt = 6.5
val start = Instant.now()
@@ -49,7 +49,7 @@ fun main(args: Array<String>) {
.generateEvents(cr)
val bunches = NumassGenerator
.generateBunches(3.0, 0.02, 5.0)
.generateBunches(6.0, 0.01, 5.0)
val discharges = NumassGenerator
.generateBunches(50.0,0.001,0.1)

View File

@@ -17,14 +17,16 @@
package inr.numass.scripts.tristan
import hep.dataforge.context.Global
import hep.dataforge.storage.files.FileStorage
import hep.dataforge.toList
import inr.numass.data.api.NumassPoint
import inr.numass.data.channel
import inr.numass.data.storage.NumassDataLoader
import inr.numass.data.storage.NumassDirectory
fun main(args: Array<String>) {
val storage = NumassStorageFactory.buildLocal(Global, "D:\\Work\\Numass\\data\\2018_04\\Adiabacity_19\\", true, false)
val set = storage.optLoader("set_4").get() as NumassDataLoader
val storage = NumassDirectory.read(Global, "D:\\Work\\Numass\\data\\2018_04\\Adiabacity_19\\") as FileStorage
val set = storage["set_4"] as NumassDataLoader
set.points.forEach { point ->
if (point.voltage == 18700.0) {
println("${point.index}:")

View File

@@ -6,32 +6,35 @@ import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.meta.MetaUtils
import hep.dataforge.meta.buildMeta
import hep.dataforge.storage.Storage
import hep.dataforge.useValue
import inr.numass.data.storage.NumassDataLoader
import inr.numass.data.storage.NumassDirectory
import kotlinx.coroutines.experimental.runBlocking
import java.io.File
import java.nio.file.Paths
private fun createSummaryNode(storage: Storage): MetaBuilder {
private suspend fun createSummaryNode(storage: Storage): MetaBuilder {
Global.logger.info("Reading content of shelf {}", storage.fullName)
val builder = MetaBuilder("shelf")
.setValue("name", storage.name)
.setValue("path", storage.fullName)
storage.shelves().filter { it.name.startsWith("Fill") }.forEach {
builder.putNode(createSummaryNode(it))
}
storage.loaders().filterIsInstance(NumassDataLoader::class.java).forEach { set ->
Global.logger.info("Reading content of set {}", set.fullName)
storage.getChildren().forEach { element ->
if(element is Storage && element.name.startsWith("Fill")){
builder.putNode(createSummaryNode(element))
} else if(element is NumassDataLoader){
Global.logger.info("Reading content of set {}", element.fullName)
val setBuilder = MetaBuilder("set")
.setValue("name", set.name)
.setValue("path", set.fullName)
.setValue("name", element.name)
.setValue("path", element.fullName)
if (set.name.endsWith("bad")) {
if (element.name.endsWith("bad")) {
setBuilder.setValue("bad", true)
}
set.points.forEach { point ->
element.points.forEach { point ->
val pointBuilder = MetaBuilder("point")
.setValue("index", point.index)
.setValue("hv", point.voltage)
@@ -42,14 +45,15 @@ private fun createSummaryNode(storage: Storage): MetaBuilder {
pointBuilder.setValue("length", it.double)
}
point.meta.useValue("events") {
pointBuilder.setValue("count", it.list.stream().mapToInt { it.int }.sum())
point.meta.useValue("events") { value ->
pointBuilder.setValue("count", value.list.stream().mapToInt { it.int }.sum())
}
setBuilder.putNode(pointBuilder)
}
builder.putNode(setBuilder)
}
}
return builder
}
@@ -74,14 +78,13 @@ fun main(args: Array<String>) {
""
}
val path = Paths.get(directory)
val output = File(directory, "summary.xml")
output.createNewFile()
val storage = NumassStorageFactory.buildLocal(Global, path, true, false)
val summary = createSummaryNode(storage)
val storage = NumassDirectory.read(Global, directory) as Storage
val summary = runBlocking { createSummaryNode(storage)}
Global.logger.info("Writing output meta")
output.outputStream().use {

View File

@@ -6,6 +6,7 @@ import hep.dataforge.data.DataNode
import hep.dataforge.data.DataSet
import hep.dataforge.meta.Meta
import hep.dataforge.meta.buildMeta
import hep.dataforge.storage.Storage
import hep.dataforge.tables.ListTable
import hep.dataforge.tables.Table
import hep.dataforge.values.ValueMap
@@ -15,37 +16,48 @@ import inr.numass.data.analyzers.NumassAnalyzer.Companion.COUNT_RATE_KEY
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.analyzers.withBinning
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
import inr.numass.data.api.SimpleNumassPoint
import inr.numass.data.storage.NumassDataLoader
import inr.numass.data.storage.NumassDirectory
import kotlinx.coroutines.experimental.runBlocking
import org.apache.commons.math3.analysis.ParametricUnivariateFunction
import org.apache.commons.math3.exception.DimensionMismatchException
import org.apache.commons.math3.fitting.SimpleCurveFitter
import org.apache.commons.math3.fitting.WeightedObservedPoint
import java.util.stream.Collectors
import kotlin.coroutines.experimental.buildSequence
object Threshold {
fun getSpectraMap(context: Context, meta: Meta): DataNode<Table> {
suspend fun getSpectraMap(context: Context, meta: Meta): DataNode<Table> {
//creating storage instance
val storage = NumassStorageFactory.buildLocal(context, meta.getString("data.dir"), true, false);
val storage = NumassDirectory.read(context, meta.getString("data.dir")) as Storage
fun Storage.loaders(): Sequence<NumassDataLoader>{
return buildSequence<NumassDataLoader> {
print("Reading ${this@loaders.fullName}")
runBlocking { this@loaders.getChildren()}.forEach {
if(it is NumassDataLoader){
yield(it)
} else if (it is Storage){
yieldAll(it.loaders())
}
}
}
}
//Reading points
//Free operation. No reading done
val sets = StorageUtils
.loaderStream(storage)
val sets = storage.loaders()
.filter { it.fullName.toString().matches(meta.getString("data.mask").toRegex()) }
.map {
println("loading ${it.fullName}")
it as NumassSet
}.collect(Collectors.toList());
val analyzer = TimeAnalyzer();
val data = DataSet.edit(NumassPoint::class).also { dataBuilder ->
sets.sortedBy { it.startTime }
.flatMap { set -> set.points.toList() }
.flatMap { set -> set.points.asSequence() }
.groupBy { it.voltage }
.forEach { key, value ->
val point = SimpleNumassPoint(value, key)

View File

@@ -194,7 +194,7 @@ class MainView(val context: Context = Global.getContext("viewer")) : View(title
val point = NumassPoint.read(it)
runLater {
contentView = AmplitudeView().apply {
set(path.toString(), CachedPoint(point))
set(path.fileName.toString(), CachedPoint(point))
}
infoView = PointInfoView(CachedPoint(point))
}