A lot of fixes. IO needs work

Alexander Nozik 2018-02-07 17:04:31 +03:00
parent 25dc485ebf
commit 82d434c5c1
25 changed files with 108 additions and 347 deletions
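Note on the recurring IO change below: the old stream-based helpers (output(context, name) { stream -> NumassUtils.write(stream, meta, res) } and the NumassIO manager removed in this commit) are replaced by wrapping results into envelopes and pushing them to the context output manager. A minimal Kotlin sketch of the new call shape, built only from calls that appear verbatim in the hunks; the helper name publishTable and its parameters are illustrative:

import hep.dataforge.context.Context
import hep.dataforge.meta.Meta
import hep.dataforge.tables.Table
import inr.numass.NumassUtils

// Illustrative sketch: wrap a result table (plus its meta) into an envelope and
// push it to the named output, instead of writing to a raw OutputStream.
fun publishTable(context: Context, name: String, stage: String, table: Table, meta: Meta) {
    context.io.output(name, stage).push(NumassUtils.wrap(table, meta))
}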

View File

@@ -21,7 +21,7 @@ import java.util.Properties;
 public class NumassProperties {

     private static File getNumassPropertiesFile() throws IOException {
-        File file = new File(Global.Companion.instance().getUserDirectory(), "numass");
+        File file = new File(Global.INSTANCE.getUserDirectory(), "numass");
         if (!file.exists()) {
             file.mkdirs();
         }
@@ -54,7 +54,7 @@ public class NumassProperties {
            }
            props.store(new FileOutputStream(store), "");
        } catch (IOException ex) {
-            Global.Companion.instance().getLogger().error("Failed to save numass properties", ex);
+            Global.INSTANCE.getLogger().error("Failed to save numass properties", ex);
        }
    }
}

View File

@@ -23,9 +23,9 @@ public class SimpleNumassPoint extends MetaHolder implements NumassPoint {
      * @param blocks
      */
     public SimpleNumassPoint(double voltage, Collection<? extends NumassBlock> blocks) {
-        super(new MetaBuilder("point").setValue(HV_KEY, voltage));
         this.blocks = new ArrayList<>(blocks);
         this.blocks.sort(Comparator.comparing(NumassBlock::getStartTime));
+        super.setMeta(new MetaBuilder("point").setValue(HV_KEY, voltage));
     }

     public SimpleNumassPoint(Meta meta, Collection<? extends NumassBlock> blocks) {

View File

@@ -27,7 +27,7 @@ public class NumassDataFactory extends DataFactory<NumassSet> {
     @Override
     protected void fill(DataTree.Builder<NumassSet> builder, Context context, Meta meta) {
         Meta newMeta = meta.getBuilder().setValue("type", "numass");
-        Storage storage = context.loadFeature("hep.dataforge:storage", StorageManager.class).buildStorage(newMeta);
+        Storage storage = context.load(StorageManager.class, Meta.empty()).buildStorage(newMeta);
         StorageUtils.loaderStream(storage).forEach(loader -> {
             if (loader instanceof NumassSet) {
                 builder.putStatic(loader.getFullName().toUnescaped(), (NumassSet) loader);

View File

@@ -29,13 +29,13 @@ public class NumassStorageFactory implements StorageType {
      */
     @NotNull
     public static FileStorage buildLocal(Context context, Path file, boolean readOnly, boolean monitor) {
-        StorageManager manager = context.loadFeature("hep.dataforge:storage", StorageManager.class);
+        StorageManager manager = context.load(StorageManager.class, Meta.empty());
         return (FileStorage) manager.buildStorage(buildStorageMeta(file.toUri(), readOnly, monitor));
     }

     @NotNull
     public static FileStorage buildLocal(Context context, String path, boolean readOnly, boolean monitor) {
-        Path file = context.getIo().getDataFile(path);
+        Path file = context.getIo().getDataDir().resolve(path);
         return buildLocal(context, file, readOnly, monitor);
     }
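The same relocation shows up for file lookups throughout this commit: the removed IOManager helpers getFile(...) and getDataFile(...) give way to resolving relative paths against explicit directories. A hedged Kotlin sketch using only the accessors visible in these hunks (getDataDir, getRootDir); the helper names are illustrative:

import hep.dataforge.context.Context
import java.nio.file.Path

// Illustrative sketch: resolve a relative path against the IO manager's data directory.
fun resolveDataPath(context: Context, relative: String): Path =
        context.io.dataDir.resolve(relative)

// Illustrative sketch: the same thing against the root directory, as Main.java now does.
fun resolveRootPath(context: Context, relative: String): Path =
        context.io.rootDir.resolve(relative)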

View File

@@ -31,5 +31,5 @@ try {
 } catch (Exception ex) {
     ex.printStackTrace();
 } finally {
-    Global.terminate();
+    Global.INSTANCE.terminate();
 }

View File

@@ -13,7 +13,6 @@ import hep.dataforge.stat.fit.ParamSet
 import hep.dataforge.stat.models.XYModel
 import hep.dataforge.stat.parametric.ParametricFunction
 import hep.dataforge.tables.Table
-import inr.numass.NumassIOKt
 import inr.numass.NumassPlugin
 import inr.numass.data.SpectrumAdapter
 import inr.numass.data.SpectrumGenerator
@@ -23,7 +22,6 @@ import inr.numass.models.misc.ModGauss
 import inr.numass.models.sterile.NumassBeta
 import inr.numass.utils.DataModelUtils

-import static hep.dataforge.grind.Grind.morph

 Context ctx = Global.instance()
 ctx.getPluginManager().load(FXPlotManager)

View File

@@ -48,7 +48,7 @@ public class PlotFitResultAction extends OneToOneAction<FitResult, FitResult> {
         NavigableValuesSource data = input.getData();

         if (!(state.getModel() instanceof XYModel)) {
-            context.getChronicle(name).reportError("The fit model should be instance of XYModel for this action. Action failed!");
+            context.getHistory().getChronicle(name).reportError("The fit model should be instance of XYModel for this action. Action failed!");
             return input;
         }
         XYModel model = (XYModel) state.getModel();
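Error reporting moves along with the IO rework: action chronicles are now reached through the context history rather than directly from the context. A minimal sketch of that call chain as it appears in these hunks (the helper name is illustrative):

import hep.dataforge.context.Context

// Illustrative sketch: report an action-scoped error via the context history.
fun reportActionError(context: Context, actionName: String, message: String) {
    context.history.getChronicle(actionName).reportError(message)
}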

View File

@@ -19,7 +19,6 @@ import hep.dataforge.actions.ActionUtils;
 import hep.dataforge.context.Context;
 import hep.dataforge.context.Global;
 import hep.dataforge.context.IOManager;
-import hep.dataforge.data.FileDataFactory;
 import hep.dataforge.io.MetaFileReader;
 import hep.dataforge.meta.Meta;
 import org.apache.commons.cli.*;
@@ -34,7 +33,6 @@ import java.io.FileNotFoundException;
 import java.nio.file.Files;
 import java.util.Locale;

-import static hep.dataforge.context.Global.out;
 import static inr.numass.Numass.printDescription;
 import static java.util.Locale.setDefault;
@@ -52,7 +50,7 @@ public class Main {
     public static void run(Context context, String[] args) throws Exception {
         if (context == null) {
-            context = Global.Companion.instance();
+            context = Global.INSTANCE;
         }

         Logger logger = LoggerFactory.getLogger("numass-main");
@@ -78,7 +76,7 @@ public class Main {
         if (args.length == 0) {
             HelpFormatter formatter = new HelpFormatter();
             formatter.printHelp("java -jar DataReader.jar [OPTIONS]", options);
-            Companion.out().println("Trying to use default config location...");
+            System.out.println("Trying to use default config location...");
         }

         if (line.hasOption("c")) {
@@ -88,7 +86,7 @@ public class Main {
             return;
         }

-        java.nio.file.Path configFile = context.getIo().getFile(cfgPath);
+        java.nio.file.Path configFile = context.getIo().getRootDir().resolve(cfgPath);

         if (!Files.exists(configFile)) {
             throw new FileNotFoundException("Configuration file not found");
@@ -119,7 +117,7 @@ public class Main {
             dataDir = new File(workDir, dataPath);
         }
         if (dataDir.exists() && dataDir.isDirectory()) {
-            context.setValue(FileDataFactory.Companion.getDATA_DIR_KEY(), dataDir.getAbsolutePath());
+            context.setValue(IOManager.DATA_DIRECTORY_CONTEXT_KEY, dataDir.getAbsolutePath());
         } else {
             throw new FileNotFoundException("Data directory not found");
         }
@@ -134,7 +132,7 @@ public class Main {
             if (!outDir.exists()) {
                 outDir.mkdirs();
             }
-            context.setValue(NumassIO.Companion.getNUMASS_OUTPUT_CONTEXT_KEY(), outDir.toString());
+            context.setValue(IOManager.WORK_DIRECTORY_CONTEXT_KEY, outDir.toString());
         }
     }

View File

@@ -17,6 +17,7 @@ package inr.numass;
 import hep.dataforge.actions.ActionManager;
 import hep.dataforge.context.Context;
+import hep.dataforge.context.ContextBuilder;
 import hep.dataforge.context.Global;
 import hep.dataforge.description.ActionDescriptor;
 import hep.dataforge.description.Descriptors;
@@ -31,14 +32,14 @@ import hep.dataforge.meta.Meta;
 public class Numass {
     public static Context buildContext(Context parent, Meta meta) {
-        return Context.Companion.builder("NUMASS", parent)
+        return new ContextBuilder("NUMASS", parent)
                 .properties(meta)
                 .plugin(NumassPlugin.class)
                 .build();
     }

     public static Context buildContext() {
-        return buildContext(Global.Companion.instance(), Meta.empty());
+        return buildContext(Global.INSTANCE, Meta.empty());
     }

     public static void printDescription(Context context) throws DescriptorException {
@@ -66,6 +67,6 @@ public class Numass {
         builder.text("***End of actions list***", "red");

-        context.getIo().getMarkupRenderer().render(builder.build());
+        context.getIo().getOutput().push(builder.build(), Meta.empty());
     }
}
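Context construction follows the same direction as buildContext above: the Context.Companion.builder factory is dropped in favour of instantiating ContextBuilder directly. A Kotlin sketch of the equivalent chain, assuming only the builder methods shown in the hunk; the context name reuses "NUMASS" from the code above and the wrapper function is illustrative:

import hep.dataforge.context.Context
import hep.dataforge.context.ContextBuilder
import hep.dataforge.context.Global
import hep.dataforge.meta.Meta
import inr.numass.NumassPlugin

// Illustrative sketch: the Java builder chain from Numass.buildContext, written in Kotlin.
// Global (the root context object) plays the role of the parent context.
fun buildNumassContext(meta: Meta = Meta.empty()): Context =
        ContextBuilder("NUMASS", Global)
                .properties(meta)
                .plugin(NumassPlugin::class.java)
                .build()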

View File

@@ -58,7 +58,7 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
         TreeMap<Instant, Values> index = getMonitorIndex(monitor, sourceData);
         if (index.isEmpty()) {
-            context.getChronicle(name).reportError("No monitor points found");
+            context.getHistory().getChronicle(name).reportError("No monitor points found");
             return sourceData;
         }
         double norm = 0;
@@ -129,7 +129,7 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
//        }
         Table res = new ListTable(dataList);

-        output(context, name, stream -> NumassUtils.INSTANCE.write(stream, meta, res));
+        context.getIo().output(name, getName()).push(NumassUtils.INSTANCE.wrap(res, meta), Meta.empty());
         return res;
     }
@@ -194,7 +194,7 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
         String monitorFileName = meta.getString("monitorFile", "monitor");
         ListTable data = new ListTable(monitorPoints);
-        output(context, monitorFileName, stream -> NumassUtils.INSTANCE.write(stream, meta, data));
+        context.getIo().output(monitorFileName, getName()).push(NumassUtils.INSTANCE.wrap(data, meta), Meta.empty());
//        ColumnedDataWriter.writeTable(stream, TableTransform.sort(data, "Timestamp", true), "Monitor points", monitorNames);
     }
}

View File

@@ -10,6 +10,7 @@ import hep.dataforge.context.Context;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.io.ColumnedDataReader;
 import hep.dataforge.meta.Laminate;
+import hep.dataforge.meta.Meta;
 import hep.dataforge.tables.ListTable;
 import hep.dataforge.tables.Table;
 import hep.dataforge.tables.ValueMap;
@@ -30,7 +31,7 @@ public class SubstractSpectrumAction extends OneToOneAction<Table, Table> {
     protected Table execute(Context context, String name, Table input, Laminate inputMeta) {
         try {
             String referencePath = inputMeta.getString("file", "empty.dat");
-            Path referenceFile = context.getIo().getFile(referencePath);
+            Path referenceFile = context.getIo().getRootDir().resolve(referencePath);
             Table referenceTable = new ColumnedDataReader(referenceFile).toTable();
             ListTable.Builder builder = new ListTable.Builder(input.getFormat());
             input.getRows().forEach(point -> {
@@ -47,7 +48,8 @@ public class SubstractSpectrumAction extends OneToOneAction<Table, Table> {
             });
             Table res = builder.build();
-            output(context, name, stream -> NumassUtils.INSTANCE.write(stream, inputMeta, res));
+            context.getIo().output(name, getName()).push(NumassUtils.INSTANCE.wrap(res, inputMeta), Meta.empty());
             return res;
         } catch (IOException ex) {
             throw new RuntimeException("Could not read reference file", ex);

View File

@@ -1,92 +0,0 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.data;
import hep.dataforge.context.Global;
import java.io.BufferedReader;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.text.ParseException;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.List;
import static java.time.temporal.ChronoUnit.SECONDS;
/**
* A patch for applying the mass-spectrometer correction
*
* @author Darksnake
*/
public class MonitorCorrector {
private final double average;
private final List<MonitorPoint> list;
public MonitorCorrector(String path) throws ParseException, IOException {
this(Global.Companion.instance().getIo().getFile(path));
}
public MonitorCorrector(Path monitorFile) throws ParseException, IOException {
list = new ArrayList<>();
BufferedReader reader = new BufferedReader(Files.newBufferedReader(monitorFile));
// Scanner sc = new Scanner(monitorFile);
double sum = 0;
String str = reader.readLine();
while ((str!=null)&&(!str.isEmpty())) {
MonitorPoint point = new MonitorPoint(str);
str = reader.readLine();
list.add(point);
sum += point.getMonitorValue();
}
average = sum / list.size();
}
/**
* Returns the point closest in time
*
* @param time
* @return
*/
public MonitorPoint findNearestMonitorPoint(LocalDateTime time) {
MonitorPoint nearest = this.list.get(0);
for (MonitorPoint point : this.list) {
if (Math.abs(point.getTime().until(time, SECONDS))
< Math.abs(nearest.getTime().until(time, SECONDS))) {
nearest = point;
}
}
return nearest;
}
public double getCorrection(LocalDateTime start, double length) {
LocalDateTime finish = start.plusSeconds((long) length);
return (findNearestMonitorPoint(start).getMonitorValue() + findNearestMonitorPoint(finish).getMonitorValue()) / 2 / average;
}
public double getCorrectionError(LocalDateTime start, double length) {
LocalDateTime finish = start.plusSeconds((long) length);
return (findNearestMonitorPoint(start).getMonitorError() + findNearestMonitorPoint(finish).getMonitorError()) / 2 / average;
}
}

View File

@@ -40,7 +40,7 @@ public class TransmissionInterpolator implements UnivariateFunction {
     public static TransmissionInterpolator fromFile(Context context, String path, String xName, String yName, int nSmooth, double w, double border) {
         try {
-            Path dataFile = context.getIo().getFile(path);
+            Path dataFile = context.getIo().getRootDir().resolve(path);
             ColumnedDataReader reader = new ColumnedDataReader(Files.newInputStream(dataFile));
             return new TransmissionInterpolator(reader, xName, yName, nSmooth, w, border);
         } catch (IOException ex) {

View File

@@ -40,7 +40,7 @@ public class OldDataReader {
     public static Table readConfig(String path) throws IOException {
         String[] list = {"X", "time", "ushift"};
         ListTable.Builder res = new ListTable.Builder(list);
-        Path file = Global.Companion.instance().getIo().getFile(path);
+        Path file = Global.INSTANCE.getIo().getRootDir().resolve(path);
         Scanner sc = new Scanner(file);
         sc.nextLine();
@@ -62,7 +62,7 @@ public class OldDataReader {
     public static Table readData(String path, double Elow) {
         SpectrumAdapter factory = new SpectrumAdapter(Meta.empty());
         ListTable.Builder res = new ListTable.Builder(Adapters.getFormat(factory));
-        Path file = Global.Companion.instance().getIo().getFile(path);
+        Path file = Global.INSTANCE.getIo().getRootDir().resolve(path);
         double x;
         int count;
         int time;
@@ -114,7 +114,7 @@ public class OldDataReader {
     public static Table readDataAsGun(String path, double Elow) {
         SpectrumAdapter factory = new SpectrumAdapter(Meta.empty());
         ListTable.Builder res = new ListTable.Builder(Adapters.getFormat(factory));
-        Path file = Global.Companion.instance().getIo().getFile(path);
+        Path file = Global.INSTANCE.getIo().getRootDir().resolve(path);
         double x;
         long count;
         int time;
@@ -147,7 +147,7 @@ public class OldDataReader {
     public static Table readSpectrumData(String path) {
         SpectrumAdapter factory = new SpectrumAdapter(Meta.empty());
         ListTable.Builder res = new ListTable.Builder(Adapters.getFormat(factory));
-        Path file = Global.Companion.instance().getIo().getFile(path);
+        Path file = Global.INSTANCE.getIo().getRootDir().resolve(path);
         double x;
         double count;
         double time;

View File

@@ -1,179 +0,0 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass
import ch.qos.logback.classic.LoggerContext
import ch.qos.logback.classic.encoder.PatternLayoutEncoder
import ch.qos.logback.classic.spi.ILoggingEvent
import ch.qos.logback.core.Appender
import ch.qos.logback.core.FileAppender
import hep.dataforge.context.Context
import hep.dataforge.context.DefaultIOManager
import hep.dataforge.context.IOManager
import hep.dataforge.fx.plots.plusAssign
import hep.dataforge.meta.Meta
import hep.dataforge.names.Name
import hep.dataforge.plots.PlotUtils
import hep.dataforge.plots.XYFunctionPlot
import hep.dataforge.plots.data.DataPlot
import hep.dataforge.stat.fit.FitResult
import hep.dataforge.stat.models.XYModel
import hep.dataforge.tables.Adapters
import hep.dataforge.utils.ReferenceRegistry
import org.apache.commons.io.output.TeeOutputStream
import org.slf4j.LoggerFactory
import java.io.File
import java.io.IOException
import java.io.OutputStream
import java.nio.file.Files
import java.nio.file.Path
import java.util.*
/**
* @author Darksnake
*/
class NumassIO : DefaultIOManager() {
internal var registry = ReferenceRegistry<OutputStream>()
// FileAppender<ILoggingEvent> appender;
override fun attach(context: Context) {
super.attach(context)
}
override fun createLoggerAppender(): Appender<ILoggingEvent> {
val lc = LoggerFactory.getILoggerFactory() as LoggerContext
val ple = PatternLayoutEncoder()
ple.pattern = "%date %level [%thread] %logger{10} [%file:%line] %msg%n"
ple.context = lc
ple.start()
val appender = FileAppender<ILoggingEvent>()
appender.file = File(workDir.toFile(), meta.getString("logFileName", "numass.log")).toString()
appender.encoder = ple
return appender
}
override fun detach() {
super.detach()
registry.forEach { it ->
try {
it.close()
} catch (e: IOException) {
LoggerFactory.getLogger(javaClass).error("Failed to close output", e)
}
}
}
private fun getExtension(type: String): String {
return when (type) {
IOManager.DEFAULT_OUTPUT_TYPE -> ".out"
else -> "." + type
}
}
override fun out(stage: Name?, name: Name, type: String): OutputStream {
val tokens = ArrayList<String>()
if (context.hasValue("numass.path")) {
val path = context.getString("numass.path")
if (path.contains(".")) {
tokens.addAll(Arrays.asList(*path.split(".".toRegex()).dropLastWhile { it.isEmpty() }.toTypedArray()))
} else {
tokens.add(path)
}
}
if (stage != null && stage.length != 0) {
tokens.addAll(Arrays.asList(*stage.asArray()))
}
val dirName = tokens.joinToString(File.separator)
val fileName = name.toString() + getExtension(type)
val out = buildOut(workDir, dirName, fileName)
registry.add(out)
return out
}
private fun buildOut(parentDir: Path, dirName: String?, fileName: String): OutputStream {
val outputFile: Path
if (!Files.exists(parentDir)) {
throw RuntimeException("Working directory does not exist")
}
try {
val dir = if (dirName.isNullOrEmpty()) {
parentDir
} else {
parentDir.resolve(dirName).also {
Files.createDirectories(it)
}
}
// String output = source.meta().getString("output", this.meta().getString("output", fileName + ".onComplete"));
outputFile = dir.resolve(fileName)
return if (context.getBoolean("numass.consoleOutput", false)) {
TeeOutputStream(Files.newOutputStream(outputFile), System.out)
} else {
Files.newOutputStream(outputFile)
}
} catch (ex: IOException) {
throw RuntimeException(ex)
}
}
companion object {
val NUMASS_OUTPUT_CONTEXT_KEY = "numass.outputDir"
}
}
fun FitResult.display(context: Context, stage: String = "fit") {
val model = optModel(context).get() as XYModel
val adapter = model.adapter
val frame = PlotUtils.getPlotManager(context)
.getPlotFrame(stage, "plot", Meta.empty())
val func = { x: Double -> model.spectrum.value(x, parameters) }
val fit = XYFunctionPlot("fit",func)
fit.density = 100
// ensuring all data points are calculated explicitly
data.rows.map { dp -> Adapters.getXValue(adapter, dp).doubleValue() }.sorted().forEach { fit.calculateIn(it) }
frame.add(fit)
frame.add(DataPlot.plot("data", adapter, data))
val residualsFrame = PlotUtils.getPlotManager(context)
.getPlotFrame(stage, "residuals", Meta.empty())
val residual = DataPlot("residuals");
data.rows.forEach {
val x = Adapters.getXValue(adapter, it).doubleValue()
val y = Adapters.getYValue(adapter, it).doubleValue()
val err = Adapters.optYError(adapter,it).orElse(1.0)
residual += Adapters.buildXYDataPoint(x, (y - func(x)) / err, 1.0)
}
residualsFrame.add(residual)
}

View File

@@ -41,7 +41,7 @@ import org.apache.commons.math3.util.FastMath
 @PluginDef(
         group = "inr.numass",
         name = "numass",
-        dependsOn = arrayOf("hep.dataforge:math", "hep.dataforge:MINUIT", "hep.dataforge:actions"),
+        dependsOn = arrayOf("hep.dataforge:math", "hep.dataforge:MINUIT", "hep.dataforge:actions", "hep.dataforge:io.dir"),
         support = false,
         info = "Numass data analysis tools"
)
@@ -50,8 +50,7 @@ class NumassPlugin : BasicPlugin() {
     override fun attach(context: Context) {
//        StorageManager.buildFrom(context);
         super.attach(context)
-        context.pluginManager.load(NumassIO())
-        loadModels(context.get(ModelManager::class.java))
+        loadModels(context[ModelManager::class.java])
         loadMath(MathPlugin.buildFrom(context))

         context.get(ActionManager::class.java).apply {
@@ -266,10 +265,8 @@ class NumassPlugin : BasicPlugin() {
         }
     }

-    class Factory : PluginFactory {
-        override fun type(): Class<out Plugin> {
-            return NumassPlugin::class.java
-        }
+    class Factory : PluginFactory() {
+        override val type: Class<out Plugin> = NumassPlugin::class.java

         override fun build(meta: Meta): Plugin {
             return NumassPlugin()

View File

@@ -19,6 +19,7 @@ import hep.dataforge.context.Context
 import hep.dataforge.data.DataNode
 import hep.dataforge.data.DataSet
 import hep.dataforge.data.binary.Binary
+import hep.dataforge.fx.plots.plusAssign
 import hep.dataforge.io.envelopes.DefaultEnvelopeType
 import hep.dataforge.io.envelopes.Envelope
 import hep.dataforge.io.envelopes.EnvelopeBuilder
@@ -28,7 +29,13 @@ import hep.dataforge.io.markup.SimpleMarkupRenderer
 import hep.dataforge.kodex.nullable
 import hep.dataforge.meta.Meta
 import hep.dataforge.meta.MetaBuilder
+import hep.dataforge.plots.PlotUtils
+import hep.dataforge.plots.XYFunctionPlot
+import hep.dataforge.plots.data.DataPlot
 import hep.dataforge.plots.jfreechart.JFreeChartFrame
+import hep.dataforge.stat.fit.FitResult
+import hep.dataforge.stat.models.XYModel
+import hep.dataforge.tables.Adapters
 import hep.dataforge.tables.ListTable
 import hep.dataforge.tables.Table
 import hep.dataforge.tables.ValueMap
@@ -132,11 +139,11 @@ object NumassUtils {
     }

-    fun write(stream: OutputStream, meta: Meta, something: Markedup) {
-        writeEnvelope(stream, meta) { out ->
-            SimpleMarkupRenderer(out).render(something.markup(meta))
-        }
-    }
+//    fun write(stream: OutputStream, meta: Meta, something: Markedup) {
+//        writeEnvelope(stream, meta) { out ->
+//            SimpleMarkupRenderer(out).render(something.markup(meta))
+//        }
+//    }

     /**
      * Convert numass set to DataNode
@@ -261,4 +268,39 @@ fun Values.unbox(): Map<String, Any?> {
         res.put(field, obj)
     }
     return res
 }
+
+fun FitResult.display(context: Context, stage: String = "fit") {
+    val model = optModel(context).get() as XYModel
+
+    val adapter = model.adapter
+
+    val frame = PlotUtils.getPlotManager(context)
+            .getPlotFrame(stage, "plot", Meta.empty())
+
+    val func = { x: Double -> model.spectrum.value(x, parameters) }
+
+    val fit = XYFunctionPlot("fit", func)
+    fit.density = 100
+
+    // ensuring all data points are calculated explicitly
+    data.rows.map { dp -> Adapters.getXValue(adapter, dp).doubleValue() }.sorted().forEach { fit.calculateIn(it) }
+
+    frame.add(fit)
+    frame.add(DataPlot.plot("data", adapter, data))
+
+    val residualsFrame = PlotUtils.getPlotManager(context)
+            .getPlotFrame(stage, "residuals", Meta.empty())
+
+    val residual = DataPlot("residuals");
+
+    data.rows.forEach {
+        val x = Adapters.getXValue(adapter, it).doubleValue()
+        val y = Adapters.getYValue(adapter, it).doubleValue()
+        val err = Adapters.optYError(adapter, it).orElse(1.0)
+        residual += Adapters.buildXYDataPoint(x, (y - func(x)) / err, 1.0)
+    }
+
+    residualsFrame.add(residual)
+}
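The FitResult.display extension added above is what scripts can call after a fit to get the spectrum and residual frames. A usage sketch, assuming the extension is importable from the inr.numass package where NumassUtils.kt lives:

import hep.dataforge.context.Context
import hep.dataforge.stat.fit.FitResult
import inr.numass.display // assumption: top-level extension from NumassUtils.kt

// Illustrative sketch: render fit and residual plots for a finished fit.
fun showFit(context: Context, result: FitResult) {
    result.display(context, stage = "fit")
}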

View File

@@ -28,7 +28,6 @@ import hep.dataforge.values.Values
 import inr.numass.NumassUtils
 import inr.numass.data.analyzers.NumassAnalyzer
-import inr.numass.data.api.NumassPoint
 import java.util.*

 /**
@@ -57,7 +56,8 @@ class MergeDataAction : ManyToOneAction<Table, Table>() {
     }

     override fun afterGroup(context: Context, groupName: String, outputMeta: Meta, output: Table) {
-        output(context, groupName) { stream -> NumassUtils.write(stream, outputMeta, output) }
+        context.io.output(groupName, name).push(NumassUtils.wrap(output, outputMeta))
+        super.afterGroup(context, groupName, outputMeta, output)
     }

     private fun mergeDataPoints(dp1: Values?, dp2: Values?): Values? {

View File

@@ -85,7 +85,7 @@ class SummaryAction : ManyToOneAction<FitState, Table>() {
                 weights[i] += weight
             }
             values[values.size - 1] = Value.of(value.chi2)
-            val point = ValueMap.of(names, *values as Array<Any>)
+            val point = ValueMap.of(names, *values)
             res.row(point)
         }
@@ -98,13 +98,13 @@ class SummaryAction : ManyToOneAction<FitState, Table>() {
             averageValues[2 * i + 2] = Value.of(1 / Math.sqrt(weights[i]))
         }

-        res.row(ValueMap.of(names, *averageValues as Array<Any>))
+        res.row(ValueMap.of(names, *averageValues))

         return res.build()
     }

     override fun afterGroup(context: Context, groupName: String, outputMeta: Meta, output: Table) {
-        output(context, groupName) { stream -> NumassUtils.write(stream, outputMeta, output) }
+        context.io.output(groupName, name).push(NumassUtils.wrap(output, outputMeta))
         super.afterGroup(context, groupName, outputMeta, output)
     }

View File

@@ -99,7 +99,7 @@ class TransformDataAction : OneToOneAction<Table, Table>() {
         val res = table.addColumn(ListColumn.build(table.getColumn(COUNT_RATE_KEY).format, cr.stream()))
                 .addColumn(ListColumn.build(table.getColumn(COUNT_RATE_ERROR_KEY).format, crErr.stream()))

-        output(context, name) { stream -> NumassUtils.write(stream, meta, res) }
+        context.io.output(name, name).push(NumassUtils.wrap(res, meta))

         return res
     }

View File

@@ -1,5 +1,6 @@
 package inr.numass.models.mc

+import hep.dataforge.context.Global
 import hep.dataforge.fx.plots.FXPlotManager
 import hep.dataforge.kodex.buildMeta
 import hep.dataforge.maths.chain.Chain
@@ -34,7 +35,7 @@ fun main(args: Array<String>) {
             .setPar("X", 0.0, 0.01, 0.0, java.lang.Double.POSITIVE_INFINITY)
             .setPar("trap", 1.0, 0.01, 0.0, java.lang.Double.POSITIVE_INFINITY)

-    val sp = SterileNeutrinoSpectrum(global, meta)
+    val sp = SterileNeutrinoSpectrum(Global, meta)

    val spectrumPlot = XYFunctionPlot.plot("spectrum", 14000.0, 18600.0, 500) {
        sp.value(it, allPars)

View File

@@ -1,8 +1,8 @@
 package inr.numass.scripts.utils

+import hep.dataforge.context.Global
 import hep.dataforge.io.XMLMetaWriter
 import hep.dataforge.kodex.buildMeta
-import hep.dataforge.kodex.global
 import hep.dataforge.kodex.useValue
 import hep.dataforge.meta.Meta
 import hep.dataforge.meta.MetaBuilder
@@ -14,7 +14,7 @@ import java.io.File
 import java.nio.file.Paths

 private fun createSummaryNode(storage: Storage): MetaBuilder {
-    global.logger.info("Reading content of shelf {}", storage.fullName)
+    Global.logger.info("Reading content of shelf {}", storage.fullName)

     val builder = MetaBuilder("shelf")
             .setValue("name", storage.name)
@@ -24,7 +24,7 @@ private fun createSummaryNode(storage: Storage): MetaBuilder {
     }

     storage.loaders().filterIsInstance(NumassDataLoader::class.java).forEach { set ->
-        global.logger.info("Reading content of set {}", set.fullName)
+        Global.logger.info("Reading content of set {}", set.fullName)

         val setBuilder = MetaBuilder("set")
                 .setValue("name", set.name)
@@ -83,14 +83,14 @@ fun main(args: Array<String>) {
     output.createNewFile()

-    val storage = NumassStorageFactory.buildLocal(global, path, true, false)
+    val storage = NumassStorageFactory.buildLocal(Global, path, true, false)

     val summary = createSummaryNode(storage)

-    global.logger.info("Writing output meta")
+    Global.logger.info("Writing output meta")
     output.outputStream().use {
         XMLMetaWriter().write(it, summary)
     }

-    global.logger.info("Calculating statistics")
+    Global.logger.info("Calculating statistics")
     val statistics = MetaBuilder("statistics")
     (14000..18600).step(100).map { it.toDouble() }.forEach {
         statistics.putNode(calculateStatistics(summary, it))

View File

@@ -69,7 +69,7 @@ class NumassFitScanSummaryTask : AbstractTask<Table>() {
                     pars.getValue("trap"))
         }
         val res = TableTransform.sort(builder.build(), "m", true)
-        output(context, nodeName) { stream -> NumassUtils.write(stream, meta, res) }
+        context.io.output(nodeName, stage = name).push(NumassUtils.wrap(res, meta))
         return res
     }

View File

@@ -80,20 +80,17 @@ val monitorTableTask = task("monitor") {
                 }
                 plots + DataPlot.plot(name, Adapters.buildXYAdapter("timestamp", "cr", "crErr"), res)
             }.also { frame ->
                 if (frame is JFreeChartFrame) {
                     //add set markers
                     addSetMarkers(frame, data.values)
                 }
-                context.io.out("numass.monitor", name, "dfp").use {
-                    NumassUtils.writeEnvelope(it, PlotFrame.Wrapper().wrap(frame))
-                }
+                context.io.output(name, stage = "numass.monitor", type = "dfp").push(PlotFrame.Wrapper().wrap(frame))
             }
         }
     }

-    context.io.out("numass.monitor", name).use {
-        NumassUtils.write(it, meta, res)
-    }
+    context.io.output(name, stage = "numass.monitor").push(NumassUtils.wrap(res, meta))

     return@join res;
}
@@ -107,9 +104,7 @@ val analyzeTask = task("analyze") {
     pipe<NumassSet, Table> { set ->
         SmartAnalyzer().analyzeSet(set, meta).also { res ->
             val outputMeta = meta.builder.putNode("data", set.meta)
-            context.io.out("numass.analyze", name).use {
-                NumassUtils.write(it, outputMeta, res)
-            }
+            context.io.output(name, stage = "numass.analyze").push(NumassUtils.wrap(res, outputMeta))
         }
     }
}
@@ -175,9 +170,7 @@ val subtractEmptyTask = task("dif") {
         res.goal.onComplete { r, _ ->
             if (r != null) {
-                context.io.out("numass.merge", input.name + "_subtract").use {
-                    NumassUtils.write(it, resMeta, r)
-                }
+                context.io.output(input.name + "_subtract", stage = "numass.merge").push(NumassUtils.wrap(r, resMeta))
             }
         }
@@ -228,7 +221,7 @@ val fitTask = task("fit") {
         configure(meta.getMeta("fit"))
     }
     pipe<Table, FitResult> { data ->
-        context.io.out("numass.fit", name).use { out ->
+        context.io.stream(name, "numass.fit").use { out ->
             val writer = PrintWriter(out)
             writer.printf("%n*** META ***%n")
             writer.println(meta.toString())
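For free-form text output (the fit report above), the task DSL now asks the IO manager for a stream instead of the removed context.io.out. A short sketch of that shape, using only the context.io.stream(...).use call that appears in the fitTask hunk; the stage name and report content are illustrative:

import hep.dataforge.context.Context
import java.io.PrintWriter

// Illustrative sketch: write a plain-text report through the new stream accessor.
fun writeTextReport(context: Context, name: String, lines: List<String>) {
    context.io.stream(name, "numass.report").use { out ->
        val writer = PrintWriter(out)
        lines.forEach { writer.println(it) }
        writer.flush()
    }
}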

View File

@@ -30,7 +30,7 @@ public class TransmissionInterpolatorTest {
     public static void main(String[] args) {
         JFreeChartFrame frame = NumassPluginKt.displayJFreeChart("TransmissionInterpolatorTest");
         //JFreeChartFrame.drawFrame("TransmissionInterpolatorTest", null);
-        TransmissionInterpolator interpolator = TransmissionInterpolator.fromFile(Global.Companion.instance(),
+        TransmissionInterpolator interpolator = TransmissionInterpolator.fromFile(Global.INSTANCE,
                 "d:\\sterile-new\\loss2014-11\\.dataforge\\merge\\empty_sum.onComplete", "Uset", "CR", 15, 0.8, 19002d);
         frame.add(DataPlot.plot("data", interpolator.getX(), interpolator.getY()));
         frame.add(XYFunctionPlot.Companion.plot("interpolated", interpolator.getXmin(), interpolator.getXmax(), 2000, interpolator::value));