Minor fixes to storage

Alexander Nozik 2017-12-26 16:15:44 +03:00
parent c38a346316
commit 804221c859
10 changed files with 213 additions and 238 deletions

View File: NumassFileEnvelope.java

@@ -5,8 +5,7 @@ import hep.dataforge.storage.filestorage.FileEnvelope;
 import inr.numass.NumassEnvelopeType;
 import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.nio.channels.SeekableByteChannel;
+import java.io.InputStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.Arrays;

@@ -20,10 +19,10 @@ public class NumassFileEnvelope extends FileEnvelope {
         if (!Files.exists(path)) {
             throw new RuntimeException("File envelope does not exist");
         }
-        try (SeekableByteChannel channel = Files.newByteChannel(path, READ)) {
-            ByteBuffer header = ByteBuffer.allocate(2);
-            channel.read(header);
-            if (Arrays.equals(header.array(), LEGACY_START_SEQUENCE)) {
+        try (InputStream stream = Files.newInputStream(path, READ)) {
+            byte[] bytes = new byte[2];
+            stream.read(bytes);
+            if (Arrays.equals(bytes, LEGACY_START_SEQUENCE)) {
                 return new NumassFileEnvelope(path, readOnly);
             } else {
                 return FileEnvelope.open(path, readOnly);

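Note on the new header check: InputStream.read(byte[]) may legally return fewer bytes than requested, so the two-byte magic comparison can in principle see a half-filled buffer. A minimal Kotlin sketch of a stricter variant (readHeader is a hypothetical helper, not project code):

import java.io.DataInputStream
import java.nio.file.Files
import java.nio.file.Path

// Read exactly `size` leading bytes or fail loudly: readFully throws
// EOFException on a short file instead of returning a short count.
fun readHeader(path: Path, size: Int = 2): ByteArray =
        DataInputStream(Files.newInputStream(path)).use { stream ->
            ByteArray(size).also { stream.readFully(it) }
        }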
View File: NumassStorageFactory.java

@@ -30,7 +30,7 @@ public class NumassStorageFactory implements StorageType {
     @NotNull
     public static FileStorage buildLocal(Context context, Path file, boolean readOnly, boolean monitor) {
         StorageManager manager = context.loadFeature("hep.dataforge:storage", StorageManager.class);
-        return (FileStorage) manager.buildStorage(buildStorageMeta(file.toUri(),readOnly,monitor));
+        return (FileStorage) manager.buildStorage(buildStorageMeta(file.toUri(), readOnly, monitor));
     }

     @NotNull

@@ -57,8 +57,8 @@ public class NumassStorageFactory implements StorageType {
             int port = meta.getInt("port", 22);
             SFTPEnvironment env = new SFTPEnvironment()
                     .withUsername(username)
-                    .withPassword(meta.getString("password","").toCharArray());
-            FileSystem fs = FileSystems.newFileSystem(uri, env,context.getClassLoader());
+                    .withPassword(meta.getString("password", "").toCharArray());
+            FileSystem fs = FileSystems.newFileSystem(uri, env, context.getClassLoader());
             path = fs.getPath(uri.getPath());
         } catch (Exception e) {
             throw new RuntimeException(e);

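The touched lines mount a remote SFTP tree through the java.nio FileSystem SPI; SFTPEnvironment is the Map-backed credentials builder of the sftp-fs provider the project appears to use. A minimal sketch of the same pattern, with mountRemote as a hypothetical name:

import java.net.URI
import java.nio.file.FileSystems
import java.nio.file.Path

// A provider registered for the URI scheme ("sftp") resolves the URI;
// `env` carries the credentials, as built by SFTPEnvironment in the diff above.
fun mountRemote(uri: URI, env: Map<String, *>, loader: ClassLoader): Path =
        FileSystems.newFileSystem(uri, env, loader).getPath(uri.path)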
View File: NumassAnalyzer.kt

@@ -124,39 +124,6 @@ interface NumassAnalyzer {
         return spectrumWithBinning(this,binSize, loChannel, upChannel)
     }

-    /**
-     * Subtract reference spectrum.
-     *
-     * @param sp1
-     * @param sp2
-     * @return
-     */
-    fun subtractAmplitudeSpectrum(sp1: Table, sp2: Table): Table {
-        val format = TableFormatBuilder()
-                .addNumber(CHANNEL_KEY, X_VALUE_KEY)
-                .addNumber(COUNT_RATE_KEY, Y_VALUE_KEY)
-                .addNumber(COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
-                .build()
-
-        val builder = ListTable.Builder(format)
-
-        sp1.forEach { row1 ->
-            val channel = row1.getDouble(CHANNEL_KEY)
-            val row2 = sp2.rows.asSequence().find { it.getDouble(CHANNEL_KEY) == channel } //t2[channel]
-            if (row2 == null) {
-                throw RuntimeException("Reference for channel $channel not found");
-            } else {
-                val value = Math.max(row1.getDouble(COUNT_RATE_KEY) - row2.getDouble(COUNT_RATE_KEY), 0.0)
-                val error1 = row1.getDouble(COUNT_RATE_ERROR_KEY)
-                val error2 = row2.getDouble(COUNT_RATE_ERROR_KEY)
-                val error = Math.sqrt(error1 * error1 + error2 * error2)
-                builder.row(channel, value, error)
-            }
-        }
-        return builder.build()
-    }
     }
 }

@@ -262,3 +229,36 @@ fun spectrumWithBinning(spectrum: Table, binSize: Int, loChannel: Int? = null, u
     }
     return builder.build()
 }
+
+/**
+ * Subtract reference spectrum.
+ *
+ * @param sp1
+ * @param sp2
+ * @return
+ */
+fun subtractAmplitudeSpectrum(sp1: Table, sp2: Table): Table {
+    val format = TableFormatBuilder()
+            .addNumber(NumassAnalyzer.CHANNEL_KEY, X_VALUE_KEY)
+            .addNumber(NumassAnalyzer.COUNT_RATE_KEY, Y_VALUE_KEY)
+            .addNumber(NumassAnalyzer.COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
+            .build()
+
+    val builder = ListTable.Builder(format)
+
+    sp1.forEach { row1 ->
+        val channel = row1.getDouble(NumassAnalyzer.CHANNEL_KEY)
+        val row2 = sp2.rows.asSequence().find { it.getDouble(NumassAnalyzer.CHANNEL_KEY) == channel } //t2[channel]
+        if (row2 == null) {
+            throw RuntimeException("Reference for channel $channel not found");
+        } else {
+            val value = Math.max(row1.getDouble(NumassAnalyzer.COUNT_RATE_KEY) - row2.getDouble(NumassAnalyzer.COUNT_RATE_KEY), 0.0)
+            val error1 = row1.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY)
+            val error2 = row2.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY)
+            val error = Math.sqrt(error1 * error1 + error2 * error2)
+            builder.row(channel, value, error)
+        }
+    }
+    return builder.build()
+}

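The subtraction helper moves from the NumassAnalyzer interface to a top-level function: per channel it clamps the subtracted count rate at zero and combines the two errors in quadrature. Call sites now import it directly (a file later in this commit gains exactly this import). A sketch of the new call shape, with placeholder names:

import hep.dataforge.tables.Table
import inr.numass.data.analyzers.subtractAmplitudeSpectrum

// before: NumassAnalyzer.subtractAmplitudeSpectrum(sp, reference)
// after:  a plain top-level call
fun subtractReference(sp: Table, reference: Table): Table =
        subtractAmplitudeSpectrum(sp, reference)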
View File: TimeAnalyzer.kt

@@ -138,9 +138,9 @@ class TimeAnalyzer @JvmOverloads constructor(private val processor: SignalProces
     )
     private fun getT0(block: NumassBlock, meta: Meta): Int {
         return if (meta.hasValue("t0")) {
-            meta.getInt("t0")!!
+            meta.getInt("t0")
         } else if (meta.hasMeta("t0")) {
-            val fraction = meta.getDouble("t0.crFraction")!!
+            val fraction = meta.getDouble("t0.crFraction")
             val cr = estimateCountRate(block)
             if (cr < meta.getDouble("t0.minCR", 0.0)) {
                 0

@@ -169,7 +169,7 @@ class TimeAnalyzer @JvmOverloads constructor(private val processor: SignalProces
      * @return
      */
     fun getEventsWithDelay(block: NumassBlock, config: Meta): Stream<Pair<NumassEvent, Long>> {
-        val inverted = config.getBoolean("inverted",false)
+        val inverted = config.getBoolean("inverted",true)
         return super.getEvents(block, config).asSequence().zipWithNext { prev, next ->
             val delay = Math.max(next.timeOffset - prev.timeOffset, 0)
             if(inverted){

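Besides dropping the now-redundant !! assertions, this flips the default of "inverted" to true. A self-contained sketch of the zipWithNext pairing that getEventsWithDelay builds on (Event is a stand-in for NumassEvent, and which event of the pair carries the delay in the inverted branch is an assumption, since the hunk cuts off before that body):

data class Event(val timeOffset: Long)

// Pair each event with the gap to its neighbour; negative gaps from
// out-of-order timestamps are clamped to zero, as in the diff.
fun eventsWithDelay(events: Sequence<Event>, inverted: Boolean): Sequence<Pair<Event, Long>> =
        events.zipWithNext { prev, next ->
            val delay = maxOf(next.timeOffset - prev.timeOffset, 0L)
            if (inverted) next to delay else prev to delay
        }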
View File

@@ -21,7 +21,8 @@ import hep.dataforge.tables.Adapters
 import hep.dataforge.tables.Table
 import hep.dataforge.tables.TableTransform
 import inr.numass.NumassPlugin
-import inr.numass.data.NumassDataUtils
+import inr.numass.data.analyzers.NumassAnalyzerKt
+import inr.numass.subthreshold.SubFitKt
 import javafx.application.Platform

 import static hep.dataforge.grind.Grind.buildMeta

@@ -33,17 +34,17 @@ ctx.getPluginManager().load(PlotManager)
 ctx.getPluginManager().load(NumassPlugin)
 ctx.getPluginManager().load(CachePlugin)

-Meta meta = buildMeta {
-    data(dir: "D:\\Work\\Numass\\data\\2017_05\\Fill_2", mask: "set_.{1,3}")
-    generate(t0: 3e4)
+Meta meta = buildMeta(t0: 3e4, inverted: false) {
+    data(dir: "D:\\Work\\Numass\\data\\2017_11\\Fill_2", mask: "set_2")
     subtract(reference: 18500)
-    fit(xLow: 450, xHigh: 700, upper: 3100, binning: 20)
-    window(lo: 300, up: 3000)
+    fit(xLow: 400, xHigh: 600, upper: 3000, binning: 20)
 }

 def shell = new GrindShell(ctx);

-DataNode<Table> spectra = UnderflowUtils.getSpectraMap(shell, meta);
+DataNode<Table> spectra = SubFitKt.getSpectraMap(ctx, meta).computeAll();

 shell.eval {

@@ -56,7 +57,7 @@ shell.eval {
             spectraMap = spectra
                     .findAll { it.name != referenceVoltage }
                     .collectEntries {
-                        [(it.meta["voltage"]): NumassDataUtils.subtractSpectrum(it.get(), referencePoint)]
+                        [(it.meta["voltage"]): NumassAnalyzerKt.subtractAmplitudeSpectrum(it.get(), referencePoint)]
                     }
         } else {
             spectraMap = spectra.collectEntries { return [(it.meta["voltage"]): it.get()] }

@@ -71,7 +72,7 @@ shell.eval {
                 DataPlot.plot(
                         it.key as String,
                         adapter,
-                        NumassDataUtils.spectrumWithBinning(it.value as Table, binning)
+                        NumassAnalyzerKt.spectrumWithBinning(it.value as Table, binning)
                 )
         )
     }

@@ -85,10 +86,10 @@ shell.eval {
     showPoints(spectraMap.findAll { it.key in [16200d, 16400d, 16800d, 17000d, 17200d, 17700d] })

-    [550, 600, 650, 700, 750].each { xHigh ->
+    [500, 550, 600, 650, 700].each { xHigh ->
         println "Caclculate correctuion for upper linearity bound: ${xHigh}"
         Table correctionTable = TableTransform.filter(
-                UnderflowFitter.fitAllPoints(
+                SubFitKt.fitAllPoints(
                         spectraMap,
                         meta["fit.xLow"] as int,
                         xHigh,

@@ -100,7 +101,7 @@ shell.eval {
                         2
                 )

-        if (xHigh == 700) {
+        if (xHigh == 600) {
             ColumnedDataWriter.writeTable(System.out, correctionTable, "underflow parameters")
         }

@@ -115,10 +116,10 @@ shell.eval {
     }

-    [400, 450, 500].each { xLow ->
+    [350, 400, 450].each { xLow ->
         println "Caclculate correctuion for lower linearity bound: ${xLow}"
         Table correctionTable = TableTransform.filter(
-                UnderflowFitter.fitAllPoints(
+                SubFitKt.fitAllPoints(
                         spectraMap,
                         xLow,
                         meta["fit.xHigh"] as int,

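The script now reaches the Kotlin port through its file facade: top-level functions in a Kotlin file compile to static methods on a class named <FileName>Kt, which is why Groovy can call SubFitKt.getSpectraMap and SubFitKt.fitAllPoints. A one-line illustration of the convention (Demo.kt is hypothetical):

// file: Demo.kt, callable from Groovy or Java as DemoKt.answer()
fun answer(): Int = 42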
View File: UnderflowFitter.groovy (deleted)

@@ -1,106 +0,0 @@
-package inr.numass.scripts.underflow
-
-import groovy.transform.CompileStatic
-import hep.dataforge.tables.ListTable
-import hep.dataforge.tables.Table
-import hep.dataforge.tables.TableTransform
-import hep.dataforge.tables.ValueMap
-import hep.dataforge.values.Values
-import inr.numass.data.analyzers.NumassAnalyzerKt
-import org.apache.commons.math3.analysis.ParametricUnivariateFunction
-import org.apache.commons.math3.exception.DimensionMismatchException
-import org.apache.commons.math3.fitting.SimpleCurveFitter
-import org.apache.commons.math3.fitting.WeightedObservedPoint
-
-import java.util.stream.Collectors
-
-import static inr.numass.data.analyzers.NumassAnalyzer.CHANNEL_KEY
-import static inr.numass.data.analyzers.NumassAnalyzer.COUNT_RATE_KEY
-
-@CompileStatic
-class UnderflowFitter {
-
-    private static String[] pointNames = ["U", "amp", "expConst", "correction"];
-
-    static Values fitPoint(Double voltage, Table spectrum, int xLow, int xHigh, int upper, int binning) {
-        double norm = spectrum.getRows().filter { Values row ->
-            int channel = row.getInt(CHANNEL_KEY);
-            return channel > xLow && channel < upper;
-        }.mapToDouble { it.getValue(COUNT_RATE_KEY).doubleValue() }.sum();
-
-        double[] fitRes = getUnderflowExpParameters(spectrum, xLow, xHigh, binning);
-        double a = fitRes[0];
-        double sigma = fitRes[1];
-
-        return ValueMap.of(pointNames, voltage, a, sigma, a * sigma * Math.exp(xLow / sigma as double) / norm + 1d);
-    }
-
-    static Table fitAllPoints(Map<Double, Table> data, int xLow, int xHigh, int upper, int binning) {
-        ListTable.Builder builder = new ListTable.Builder(pointNames);
-        data.each { voltage, spectrum -> builder.row(fitPoint(voltage, spectrum, xLow, xHigh, upper, binning)) }
-        return builder.build();
-    }
-
-    /**
-     * Calculate underflow exponent parameters using (xLow, xHigh) window for
-     * extrapolation
-     *
-     * @param xLow
-     * @param xHigh
-     * @return
-     */
-    private static double[] getUnderflowExpParameters(Table spectrum, int xLow, int xHigh, int binning) {
-        try {
-            if (xHigh <= xLow) {
-                throw new IllegalArgumentException("Wrong borders for underflow calculation");
-            }
-            Table binned = TableTransform.filter(
-                    NumassAnalyzerKt.spectrumWithBinning(spectrum, binning),
-                    CHANNEL_KEY,
-                    xLow,
-                    xHigh
-            );
-
-            List<WeightedObservedPoint> points = binned.getRows()
-                    .map {
-                        new WeightedObservedPoint(
-                                1d,//1d / p.getValue() , //weight
-                                it.getDouble(CHANNEL_KEY) as double, // x
-                                it.getDouble(COUNT_RATE_KEY) / binning as double) //y
-                    }
-                    .collect(Collectors.toList());
-
-            SimpleCurveFitter fitter = SimpleCurveFitter.create(new ExponentFunction(), [1d, 200d] as double[])
-            return fitter.fit(points);
-        } catch (Exception ex) {
-            return [0, 0] as double[];
-        }
-    }
-
-    /**
-     * Exponential function for fitting
-     */
-    private static class ExponentFunction implements ParametricUnivariateFunction {
-        @Override
-        public double value(double x, double... parameters) {
-            if (parameters.length != 2) {
-                throw new DimensionMismatchException(parameters.length, 2);
-            }
-            double a = parameters[0];
-            double sigma = parameters[1];
-            //return a * (Math.exp(x / sigma) - 1);
-            return a * Math.exp(x / sigma as double);
-        }
-
-        @Override
-        public double[] gradient(double x, double... parameters) {
-            if (parameters.length != 2) {
-                throw new DimensionMismatchException(parameters.length, 2);
-            }
-            double a = parameters[0];
-            double sigma = parameters[1];
-            return [Math.exp(x / sigma as double), -a * x / sigma / sigma * Math.exp(x / sigma as double)] as double[]
-        }
-    }
-}

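For reference, the model this class fitted, and that its Kotlin replacement below fits as well, together with the analytic gradient returned by gradient():

f(x;\,a,\sigma) = a\,e^{x/\sigma},\qquad
\frac{\partial f}{\partial a} = e^{x/\sigma},\qquad
\frac{\partial f}{\partial \sigma} = -\frac{a\,x}{\sigma^{2}}\,e^{x/\sigma}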
View File: UnderflowUtils.groovy (deleted)

@@ -1,73 +0,0 @@
-package inr.numass.scripts.underflow
-
-import hep.dataforge.cache.CachePlugin
-import hep.dataforge.data.DataNode
-import hep.dataforge.data.DataSet
-import hep.dataforge.grind.GrindShell
-import hep.dataforge.grind.actions.GrindPipe
-import hep.dataforge.meta.Meta
-import hep.dataforge.storage.commons.StorageUtils
-import hep.dataforge.tables.Table
-import inr.numass.data.analyzers.NumassAnalyzer
-import inr.numass.data.analyzers.TimeAnalyzer
-import inr.numass.data.api.NumassPoint
-import inr.numass.data.api.NumassSet
-import inr.numass.data.api.SimpleNumassPoint
-import inr.numass.data.storage.NumassStorage
-import inr.numass.data.storage.NumassStorageFactory
-
-import java.util.stream.Collectors
-
-import static hep.dataforge.grind.Grind.buildMeta
-
-class UnderflowUtils {
-
-    static DataNode<Table> getSpectraMap(GrindShell shell, Meta meta) {
-        return shell.eval {
-            //Defining root directory
-            File dataDirectory = new File(meta.getString("data.dir"))
-
-            //creating storage instance
-            NumassStorage storage = NumassStorageFactory.buildLocal(dataDirectory);
-
-            //Reading points
-            //Free operation. No reading done
-            List<NumassSet> sets = StorageUtils
-                    .loaderStream(storage)
-                    .filter { it.key.matches(meta.getString("data.mask")) }
-                    .map {
-                        println "loading ${it.key}"
-                        return it.value
-                    }.collect(Collectors.toList());
-
-            NumassAnalyzer analyzer = new TimeAnalyzer();
-
-            def dataBuilder = DataSet.builder(NumassPoint);
-
-            sets.sort { it.startTime }
-                    .collectMany { NumassSet set -> set.points.collect() }
-                    .groupBy { NumassPoint point -> point.voltage }
-                    .each { key, value ->
-                        def point = new SimpleNumassPoint(key as double, value as List<NumassPoint>)
-                        String name = (key as Integer).toString()
-                        dataBuilder.putStatic(name, point, buildMeta(voltage: key));
-                    }
-
-            DataNode<NumassPoint> data = dataBuilder.build()
-
-            def generate = GrindPipe.build("generate") {
-                result { input ->
-                    return analyzer.getAmplitudeSpectrum(input as NumassPoint, delegate.meta)
-                }
-            }
-
-            DataNode<Table> spectra = generate.run(shell.context, data, meta.getMeta("generate"));
-
-            Meta id = buildMeta {
-                put meta.getMeta("data")
-                put meta.getMeta("generate")
-            }
-            return shell.context.getFeature(CachePlugin).cacheNode("underflow", id, spectra)
-        } as DataNode<Table>
-    }
-}

View File

@@ -26,6 +26,7 @@ import inr.numass.NumassPlugin
 import inr.numass.data.NumassDataUtils
 import inr.numass.data.analyzers.NumassAnalyzer
 import inr.numass.data.analyzers.SmartAnalyzer
+import inr.numass.data.analyzers.subtractAmplitudeSpectrum
 import inr.numass.data.api.NumassSet
 import inr.numass.data.storage.NumassStorageFactory

View File: SubFit.kt (new file)

@@ -0,0 +1,149 @@
+package inr.numass.subthreshold
+
+import hep.dataforge.context.Context
+import hep.dataforge.data.DataNode
+import hep.dataforge.data.DataSet
+import hep.dataforge.kodex.buildMeta
+import hep.dataforge.kodex.pipe
+import hep.dataforge.kodex.toList
+import hep.dataforge.meta.Meta
+import hep.dataforge.storage.commons.StorageUtils
+import hep.dataforge.tables.ListTable
+import hep.dataforge.tables.Table
+import hep.dataforge.tables.TableTransform
+import hep.dataforge.tables.ValueMap
+import hep.dataforge.values.Values
+import inr.numass.data.analyzers.NumassAnalyzer.Companion.CHANNEL_KEY
+import inr.numass.data.analyzers.NumassAnalyzer.Companion.COUNT_RATE_KEY
+import inr.numass.data.analyzers.TimeAnalyzer
+import inr.numass.data.analyzers.spectrumWithBinning
+import inr.numass.data.api.NumassPoint
+import inr.numass.data.api.NumassSet
+import inr.numass.data.api.SimpleNumassPoint
+import inr.numass.data.storage.NumassStorageFactory
+import org.apache.commons.math3.analysis.ParametricUnivariateFunction
+import org.apache.commons.math3.exception.DimensionMismatchException
+import org.apache.commons.math3.fitting.SimpleCurveFitter
+import org.apache.commons.math3.fitting.WeightedObservedPoint
+import java.util.stream.Collectors
+
+internal fun getSpectraMap(context: Context, meta: Meta): DataNode<Table> {
+    //creating storage instance
+    val storage = NumassStorageFactory.buildLocal(context, meta.getString("data.dir"), true, false);
+
+    //Reading points
+    //Free operation. No reading done
+    val sets = StorageUtils
+            .loaderStream(storage)
+            .filter { it.fullName.toString().matches(meta.getString("data.mask").toRegex()) }
+            .map {
+                println("loading ${it.fullName}")
+                it as NumassSet
+            }.collect(Collectors.toList());
+
+    val analyzer = TimeAnalyzer();
+
+    val data = DataSet.builder(NumassPoint::class.java).also { dataBuilder ->
+        sets.sortedBy { it.startTime }
+                .flatMap { set -> set.points.toList() }
+                .groupBy { it.voltage }
+                .forEach { key, value ->
+                    val point = SimpleNumassPoint(key, value)
+                    val name = key.toInt().toString()
+                    dataBuilder.putStatic(name, point, buildMeta("meta", "voltage" to key));
+                }
+    }.build()
+
+    return data.pipe(context, meta) {
+        result { input ->
+            analyzer.getAmplitudeSpectrum(input, this.meta)
+        }
+    }
+
+//    val id = buildMeta {
+//        +meta.getMeta("data")
+//        +meta.getMeta("analyze")
+//    }
+//    return context.getFeature(CachePlugin::class.java).cacheNode("subThreshold", id, spectra)
+}
+
+private val pointNames = arrayOf("U", "amp", "expConst", "correction");
+
+/**
+ * Exponential function for fitting
+ */
+private class ExponentFunction : ParametricUnivariateFunction {
+    override fun value(x: Double, vararg parameters: Double): Double {
+        if (parameters.size != 2) {
+            throw DimensionMismatchException(parameters.size, 2);
+        }
+        val a = parameters[0];
+        val sigma = parameters[1];
+        //return a * (Math.exp(x / sigma) - 1);
+        return a * Math.exp(x / sigma);
+    }
+
+    override fun gradient(x: Double, vararg parameters: Double): DoubleArray {
+        if (parameters.size != 2) {
+            throw DimensionMismatchException(parameters.size, 2);
+        }
+        val a = parameters[0];
+        val sigma = parameters[1];
+        return doubleArrayOf(Math.exp(x / sigma), -a * x / sigma / sigma * Math.exp(x / sigma))
+    }
+}
+
+/**
+ * Calculate underflow exponent parameters using (xLow, xHigh) window for
+ * extrapolation
+ *
+ * @param xLow
+ * @param xHigh
+ * @return
+ */
+private fun getUnderflowExpParameters(spectrum: Table, xLow: Int, xHigh: Int, binning: Int): Pair<Double, Double> {
+    try {
+        if (xHigh <= xLow) {
+            throw IllegalArgumentException("Wrong borders for underflow calculation");
+        }
+        val binned = TableTransform.filter(
+                spectrumWithBinning(spectrum, binning),
+                CHANNEL_KEY,
+                xLow,
+                xHigh
+        );
+
+        val points = binned.rows
+                .map {
+                    WeightedObservedPoint(
+                            1.0,//1d / p.getValue() , //weight
+                            it.getDouble(CHANNEL_KEY), // x
+                            it.getDouble(COUNT_RATE_KEY) / binning) //y
+                }
+                .collect(Collectors.toList());
+
+        val fitter = SimpleCurveFitter.create(ExponentFunction(), doubleArrayOf(1.0, 200.0))
+        val res = fitter.fit(points)
+        return Pair(res[0], res[1])
+    } catch (ex: Exception) {
+        return Pair(0.0, 0.0);
+    }
+}
+
+internal fun fitPoint(voltage: Double, spectrum: Table, xLow: Int, xHigh: Int, upper: Int, binning: Int): Values {
+    val norm = spectrum.rows.filter { row ->
+        row.getInt(CHANNEL_KEY) in (xLow + 1)..(upper - 1);
+    }.mapToDouble { it.getValue(COUNT_RATE_KEY).doubleValue() }.sum();
+
+    val (a, sigma) = getUnderflowExpParameters(spectrum, xLow, xHigh, binning);
+
+    return ValueMap.of(pointNames, voltage, a, sigma, a * sigma * Math.exp(xLow / sigma) / norm + 1.0);
+}
+
+internal fun fitAllPoints(data: Map<Double, Table>, xLow: Int, xHigh: Int, upper: Int, binning: Int): Table {
+    return ListTable.Builder(*pointNames).apply {
+        data.forEach { voltage, spectrum -> row(fitPoint(voltage, spectrum, xLow, xHigh, upper, binning)) }
+    }.build()
+}

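One way to read the correction value computed in fitPoint (an interpretation; the code does not spell it out): integrating the fitted exponential tail below x_low and normalizing by the counts observed in (x_low, upper) gives

\int_{-\infty}^{x_{\mathrm{low}}} a\,e^{x/\sigma}\,dx = a\,\sigma\,e^{x_{\mathrm{low}}/\sigma}
\quad\Rightarrow\quad
\mathrm{correction} = 1 + \frac{a\,\sigma\,e^{x_{\mathrm{low}}/\sigma}}{\mathrm{norm}},

which matches a * sigma * Math.exp(xLow / sigma) / norm + 1.0 in the new file above.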
View File: StorageView.kt

@@ -272,17 +272,21 @@ class StorageView(private val context: Context = Global.instance()) : View(title
 //    }

     private fun loadDirectory(path: URI) {
+        statusBar.text = "Loading storage: $path"
+        statusBar.progress = -1.0;
         runGoal("loadDirectory[$path]") {
             title = "Load storage ($path)"
             progress = -1.0
             message = "Building numass storage tree..."
-            (StorageManager.buildStorage(context, NumassStorageFactory.buildStorageMeta(path, true, true)) as NumassStorage).also {
+            (StorageManager.buildStorage(context, NumassStorageFactory.buildStorageMeta(path, true, false)) as NumassStorage).also {
                 progress = 1.0
             }
         } ui {
             storage = it
             storageName = "Storage: " + path
+            statusBar.text = "OK"
+            statusBar.progress = 0.0;
         }
     }