working on grind multi threading and dsl

This commit is contained in:
Alexander Nozik 2016-08-12 18:04:42 +03:00
parent cb1951babd
commit 2cfc74dea8
27 changed files with 267 additions and 1355 deletions

View File

@ -1,4 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<catalog xmlns="urn:oasis:names:tc:entity:xmlns:xml:catalog" prefer="system">
<system systemId="http://javafx.com/javafx/8.0.65" uri="www.oracle.com/technetwork/java/javase/overview/index.html"/>
</catalog>

File diff suppressed because one or more lines are too long

View File

@ -87,18 +87,18 @@ res = fm.runDefaultTask(res, "E0", "N", "bkg", "mnu2");
res.print(out);
//spectrum.counter.print(out);
//spectrum.counter.print(onComplete);
//
//// fm.setPriorProb(new GaussianPrior("X", 0.47, 0.47*0.03));
//// fm.setPriorProb(new MultivariateGaussianPrior(allPars.getSubSet("X","trap")));
//res = fm.runTask(res, "MINUIT", "run", "E0", "N", "bkg", "mnu2");
////
//res.print(out);
//res.print(onComplete);
//sp.setCaching(true);
//sp.setSuppressWarnings(true);
//
//BayesianManager bm = new BayesianManager();
//bm.printMarginalLikelihood(out, "U2", res, ["E0", "N", "bkg", "U2", "X"], 10000);
//bm.printMarginalLikelihood(onComplete, "U2", res, ["E0", "N", "bkg", "U2", "X"], 10000);
// PrintNamed.printLike2D(Out.out, "like", res, "N", "E0", 30, 60, 2);
// PrintNamed.printLike2D(Out.onComplete, "like", res, "N", "E0", 30, 60, 2);

View File

@ -72,8 +72,8 @@ allPars.setParValue("trap", 0);
allPars.setParError("trap", 0.01d);
allPars.setParDomain("trap", 0d, Double.POSITIVE_INFINITY);
// PrintNamed.printSpectrum(GlobalContext.out(), spectrum, allPars, 0.0, 18700.0, 600);
//String fileName = "d:\\PlayGround\\merge\\scans.out";
// PrintNamed.printSpectrum(GlobalContext.onComplete(), spectrum, allPars, 0.0, 18700.0, 600);
//String fileName = "d:\\PlayGround\\merge\\scans.onComplete";
// String configName = "d:\\PlayGround\\SCAN.CFG";
// ListTable config = OldDataReader.readConfig(configName);
SpectrumGenerator generator = new SpectrumGenerator(model, allPars, 12316);

View File

@ -15,9 +15,10 @@
*/
package inr.numass.scripts
import static groovy.io.FileType.*
import org.apache.commons.io.FilenameUtils
import static groovy.io.FileType.FILES
File dir = new File("D:\\loss-2014\\");
File resultDir = new File(dir, ".dataforge\\showLoss\\");
@ -33,7 +34,7 @@ resultFile.setText("name\tX\tX_err\texPos\texPos_err\tionPos\tionPos_err\texW\te
dir.eachFileMatch FILES, {it ==~ /[dh]2_\d\d_\d(?:_bkg)?\.xml/}, {
try{
inr.numass.Main.main("-c", it.getAbsolutePath())
File outFile = new File(resultDir, FilenameUtils.getBaseName(it.getName())+"_loss.out")
File outFile = new File(resultDir, FilenameUtils.getBaseName(it.getName()) + "_loss.onComplete")
resultFile.append(outFile.readLines().get(50));
resultFile.append("\r\n");
} catch(Exception ex){

View File

@ -13,22 +13,19 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.scripts;
package inr.numass.scripts
import hep.dataforge.meta.MetaBuilder;
import hep.dataforge.context.GlobalContext;
import hep.dataforge.stat.fit.ParamSet;
import inr.numass.data.SpectrumInformation;
import inr.numass.models.ModularSpectrum;
import inr.numass.models.BetaSpectrum;
import inr.numass.models.NBkgSpectrum;
import inr.numass.models.ResolutionFunction;
import java.util.HashMap;
import java.util.Locale;
import static java.util.Locale.setDefault;
import java.util.Map;
import org.apache.commons.math3.analysis.UnivariateFunction;
import hep.dataforge.context.GlobalContext
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.stat.fit.ParamSet
import inr.numass.data.SpectrumInformation
import inr.numass.models.BetaSpectrum
import inr.numass.models.ModularSpectrum
import inr.numass.models.NBkgSpectrum
import inr.numass.models.ResolutionFunction
import org.apache.commons.math3.analysis.UnivariateFunction
import static java.util.Locale.setDefault
setDefault(Locale.US);
GlobalContext global = GlobalContext.instance();
@ -86,7 +83,7 @@ SpectrumInformation sign = new SpectrumInformation(spectrum);
// PrintNamed.printNamedMatrix(Out.out, infoMatrix);
// NamedMatrix cov = sign.getExpetedCovariance(allPars, config,"U2","E0","N");
//
// PrintWriter out = GlobalContext.out();
// PrintWriter onComplete = GlobalContext.onComplete();
//
// printNamedMatrix(out, cov);
//

View File

@ -15,7 +15,10 @@
*/
package inr.numass.scripts
import hep.dataforge.stat.fit.*
import hep.dataforge.stat.fit.FitManager
import hep.dataforge.stat.fit.FitState
import hep.dataforge.stat.fit.MINUITPlugin
import hep.dataforge.stat.fit.ParamSet
import hep.dataforge.stat.models.XYModel
import hep.dataforge.tables.ListTable
import inr.numass.data.SpectrumDataAdapter
@ -70,8 +73,8 @@ allPars.setParValue("trap", 1d);
allPars.setParError("trap", 0.01d);
allPars.setParDomain("trap", 0d, Double.POSITIVE_INFINITY);
// PrintNamed.printSpectrum(GlobalContext.out(), spectrum, allPars, 0.0, 18700.0, 600);
//String fileName = "d:\\PlayGround\\merge\\scans.out";
// PrintNamed.printSpectrum(GlobalContext.onComplete(), spectrum, allPars, 0.0, 18700.0, 600);
//String fileName = "d:\\PlayGround\\merge\\scans.onComplete";
// String configName = "d:\\PlayGround\\SCAN.CFG";
// ListTable config = OldDataReader.readConfig(configName);
SpectrumGenerator generator = new SpectrumGenerator(model, allPars, 12316);

View File

@ -16,10 +16,10 @@
package inr.numass.scripts
import hep.dataforge.context.GlobalContext
import hep.dataforge.io.FittingIOUtils
import hep.dataforge.stat.fit.FitManager
import hep.dataforge.stat.fit.ParamSet
import hep.dataforge.stat.models.XYModel
import hep.dataforge.io.FittingIOUtils
import inr.numass.data.SpectrumDataAdapter
import inr.numass.models.GunSpectrum
import inr.numass.models.NBkgSpectrum
@ -50,7 +50,7 @@ allPars.setParValue("sigma", 0.6);
FittingIOUtils.printSpectrum(new PrintWriter(System.out), spectrum, allPars, 18495, 18505, 100);
// //String fileName = "d:\\PlayGround\\merge\\scans.out";
// //String fileName = "d:\\PlayGround\\merge\\scans.onComplete";
//// String configName = "d:\\PlayGround\\SCAN.CFG";
//// ListTable config = OldDataReader.readConfig(configName);
// SpectrumGenerator generator = new SpectrumGenerator(model, allPars, 12316);
@ -63,5 +63,5 @@ FittingIOUtils.printSpectrum(new PrintWriter(System.out), spectrum, allPars, 184
//
// FitState res = fm.runTask(state, "QOW", FitTask.TASK_RUN, "N", "bkg", "pos", "sigma");
//
// res.print(out());
// res.print(onComplete());

View File

@ -77,5 +77,5 @@ FittingIOUtils.printSpectrum(GlobalContext.out(), spectrum, allPars, 14000, 1860
//
//
//
//res.print(out());
//res.print(onComplete());
//

View File

@ -1,10 +0,0 @@
package inr.numass.scripts
import hep.dataforge.grind.GrindLauncher
/**
* Created by darksnake on 11-Aug-16.
*/
new GrindLauncher().from { new File("D:\\Work\\Numass\\sterile2016\\workspace.groovy") }.runTask("numass.prepare", "fill_2")

View File

@ -69,10 +69,10 @@ allPars.setParDomain("trap", 0d, Double.POSITIVE_INFINITY);
// PlotManager pm = new PlotManager();
// String plotTitle = "Tritium spectrum";
// pm.plotFunction(FunctionUtils.getSpectrumFunction(spectrum, allPars), 14000, 18600, 500,plotTitle, null);
// PrintNamed.printSpectrum(Out.out, beta.trapping, allPars, 14000d, 18600d, 500);
// PrintNamed.printSpectrum(Out.onComplete, beta.trapping, allPars, 14000d, 18600d, 500);
// double e = 18570d;
// trans.alpha = 1e-4;
// trans.plotTransmission(System.out, allPars, e, e-1000d, e+100d, 200);
// trans.plotTransmission(System.onComplete, allPars, e, e-1000d, e+100d, 200);
SpectrumGenerator generator = new SpectrumGenerator(model, allPars);
// ColumnedDataFile file = new ColumnedDataFile("d:\\PlayGround\\RUN36.cfg");
@ -93,7 +93,7 @@ ListTable data = generator.generateData(config);
FitState state = fm.buildState(data, model, allPars);
// fm.checkDerivs();
// res.print(Out.out);
// res.print(Out.onComplete);
// fm.checkFitDerivatives();
FitState res = fm.runDefaultTask(state, "U2", "N", "trap");
@ -107,14 +107,14 @@ beta.setSuppressWarnings(true);
BayesianManager bm = new BayesianManager();
// bm.setPriorProb(new OneSidedUniformPrior("trap", 0, true));
// bm.setPriorProb(new GaussianPrior("trap", 1d, 0.002));
// bm.printMarginalLikelihood(Out.out,"U2", res);
// bm.printMarginalLikelihood(Out.onComplete,"U2", res);
FitState conf = bm.getConfidenceInterval("U2", res, ["U2", "N", "trap"]);
// plotTitle = String.format("Marginal likelihood for parameter \'%s\'", "U2");
// pm.plotFunction(bm.getMarginalLikelihood("U2", res), 0, 2e-3, 40,plotTitle, null);
conf.print(out);
// PrintNamed.printLogProbRandom(Out.out, res, 5000,0.5d, "E0","N");
// PrintNamed.printLogProbRandom(Out.onComplete, res, 5000,0.5d, "E0","N");
spectrum.counter.print(out);

View File

@ -0,0 +1,16 @@
package inr.numass.scripts.workspace
import hep.dataforge.actions.ActionUtils
import hep.dataforge.context.Context
import hep.dataforge.io.IOManager
import inr.numass.NumassPlugin
/**
* Created by darksnake on 12-Aug-16.
*/
Context context = new Context("numass");
context.loadPlugin(new NumassPlugin());
context.putValue(IOManager.ROOT_DIRECTORY_CONTEXT_KEY, "D:\\Work\\Numass\\sterile2016");
ActionUtils.runConfig(context, "test.xml").computeAll()

View File

@ -0,0 +1,13 @@
package inr.numass.scripts.workspace
import hep.dataforge.grind.GrindLauncher
/**
* Created by darksnake on 11-Aug-16.
*/
new GrindLauncher().from {
new File("D:\\Work\\Numass\\sterile2016\\workspace.groovy")
}.runTask("numass.prepare", "fill_2").computeAll()

View File

@ -17,29 +17,24 @@ package inr.numass;
import hep.dataforge.actions.ActionUtils;
import hep.dataforge.context.Context;
import hep.dataforge.context.GlobalContext;
import static hep.dataforge.context.GlobalContext.out;
import hep.dataforge.data.FileDataFactory;
import hep.dataforge.stat.fit.MINUITPlugin;
import hep.dataforge.io.IOManager;
import hep.dataforge.io.MetaFileReader;
import hep.dataforge.meta.Meta;
import static inr.numass.Numass.printDescription;
import hep.dataforge.stat.fit.MINUITPlugin;
import org.apache.commons.cli.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.swing.*;
import javax.swing.filechooser.FileFilter;
import javax.swing.filechooser.FileNameExtensionFilter;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.Locale;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.filechooser.FileFilter;
import javax.swing.filechooser.FileNameExtensionFilter;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static hep.dataforge.context.GlobalContext.out;
import static inr.numass.Numass.printDescription;
import static java.util.Locale.setDefault;
/**
@ -152,7 +147,7 @@ public class Main {
options.addOption("h", "home", true,
"Working directory (by default the working directory is the directory where config file is placed)");
options.addOption("d", "data", true, "Data directory (absolute or relative to working directory)");
options.addOption("o", "out", true, "Output directory (absolute or relative to working directory)");
options.addOption("o", "onComplete", true, "Output directory (absolute or relative to working directory)");
options.addOption("l", "list", false, "List of available actions");
options.addOption("lc", "list-color", false, "List of available actions with ANSI coloring");

View File

@ -20,19 +20,16 @@ import hep.dataforge.data.binary.Binary;
import hep.dataforge.io.BasicIOManager;
import hep.dataforge.meta.Meta;
import hep.dataforge.names.Name;
import inr.numass.storage.NumassDataReader;
import inr.numass.data.NumassPawReader;
import inr.numass.storage.NumassDataReader;
import inr.numass.storage.RawNMFile;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.output.TeeOutputStream;
import java.io.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.output.TeeOutputStream;
/**
*
@ -42,25 +39,8 @@ public class NumassIO extends BasicIOManager {
public static final String NUMASS_OUTPUT_CONTEXT_KEY = "numass.outputDir";
@Override
public OutputStream out(Name stage, Name name) {
List<String> tokens = new ArrayList<>();
if (getContext().hasValue("numass.path")) {
String path = getContext().getString("numass.path");
if (path.contains(".")) {
tokens.addAll(Arrays.asList(path.split(".")));
} else {
tokens.add(path);
}
}
if (stage != null) {
tokens.addAll(Arrays.asList(stage.asArray()));
}
String dirName = String.join(File.separator, tokens);
String fileName = name.removeNameSpace().toString() + ".out";
return buildOut(getWorkDirectory(), dirName, fileName);
public static RawNMFile readAsDat(Binary source, Meta config) throws IOException {
return new NumassDataReader(source, config).read();
}
// private File getOutputDir() {
@ -73,36 +53,6 @@ public class NumassIO extends BasicIOManager {
//
// }
protected OutputStream buildOut(File parentDir, String dirName, String fileName) {
File outputFile;
if (!parentDir.exists()) {
throw new RuntimeException("Working directory does not exist");
}
if (dirName != null && !dirName.isEmpty()) {
parentDir = new File(parentDir, dirName);
if (!parentDir.exists()) {
parentDir.mkdirs();
}
}
// String output = source.meta().getString("output", this.meta().getString("output", fileName + ".out"));
outputFile = new File(parentDir, fileName);
try {
if (getContext().getBoolean("numass.consoleOutput", false)) {
return new TeeOutputStream(new FileOutputStream(outputFile), System.out);
} else {
return new FileOutputStream(outputFile);
}
} catch (FileNotFoundException ex) {
throw new RuntimeException(ex);
}
}
public static RawNMFile readAsDat(Binary source, Meta config) throws IOException {
return new NumassDataReader(source, config).read();
}
public static RawNMFile readAsPaw(Binary source, Meta config) throws IOException {
return new NumassPawReader().readPaw(source, config.getString(FileDataFactory.FILE_NAME_KEY));
}
@ -126,4 +76,51 @@ public class NumassIO extends BasicIOManager {
throw new RuntimeException(ex);
}
}
@Override
public OutputStream out(Name stage, Name name) {
List<String> tokens = new ArrayList<>();
if (getContext().hasValue("numass.path")) {
String path = getContext().getString("numass.path");
if (path.contains(".")) {
tokens.addAll(Arrays.asList(path.split(".")));
} else {
tokens.add(path);
}
}
if (stage != null) {
tokens.addAll(Arrays.asList(stage.asArray()));
}
String dirName = String.join(File.separator, tokens);
String fileName = name.removeNameSpace().toString() + ".out";
return buildOut(getWorkDirectory(), dirName, fileName);
}
protected OutputStream buildOut(File parentDir, String dirName, String fileName) {
File outputFile;
if (!parentDir.exists()) {
throw new RuntimeException("Working directory does not exist");
}
if (dirName != null && !dirName.isEmpty()) {
parentDir = new File(parentDir, dirName);
if (!parentDir.exists()) {
parentDir.mkdirs();
}
}
// String output = source.meta().getString("output", this.meta().getString("output", fileName + ".onComplete"));
outputFile = new File(parentDir, fileName);
try {
if (getContext().getBoolean("numass.consoleOutput", false)) {
return new TeeOutputStream(new FileOutputStream(outputFile), System.out);
} else {
return new FileOutputStream(outputFile);
}
} catch (FileNotFoundException ex) {
throw new RuntimeException(ex);
}
}
}

View File

@ -24,21 +24,12 @@ import hep.dataforge.io.ColumnedDataWriter;
import hep.dataforge.io.reports.Reportable;
import hep.dataforge.meta.Meta;
import hep.dataforge.meta.MetaBuilder;
import hep.dataforge.tables.DataPoint;
import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.MapPoint;
import hep.dataforge.tables.PointSource;
import hep.dataforge.tables.Table;
import hep.dataforge.tables.TableFormat;
import hep.dataforge.tables.*;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.*;
/**
*
* @author Darksnake
*/
@TypedActionDef(name = "merge", inputType = Table.class, outputType = Table.class, info = "Merge different numass data files into one.")
@ -76,7 +67,7 @@ public class MergeDataAction extends ManyToOneAction<Table, Table> {
@Override
protected MetaBuilder outputMeta(DataNode<Table> input) {
String numassPath = input.dataStream().<String>map(item -> item.getValue().meta().getString("numass.path", null))
String numassPath = input.dataStream().<String>map(data -> data.meta().getString("numass.path", null))
.reduce("", (String path, String newPath) -> {
if (path == null) {
return null;

View File

@ -28,6 +28,10 @@ import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.MapPoint;
import hep.dataforge.tables.Table;
import hep.dataforge.values.Value;
import javafx.util.Pair;
import org.apache.commons.math3.analysis.interpolation.SplineInterpolator;
import org.apache.commons.math3.analysis.polynomials.PolynomialSplineFunction;
import java.io.OutputStream;
import java.time.Instant;
import java.util.ArrayList;
@ -35,9 +39,6 @@ import java.util.List;
import java.util.Map.Entry;
import java.util.TreeMap;
import java.util.concurrent.CopyOnWriteArrayList;
import javafx.util.Pair;
import org.apache.commons.math3.analysis.interpolation.SplineInterpolator;
import org.apache.commons.math3.analysis.polynomials.PolynomialSplineFunction;
/**
*
@ -45,7 +46,7 @@ import org.apache.commons.math3.analysis.polynomials.PolynomialSplineFunction;
*/
@TypedActionDef(name = "monitor", inputType = Table.class, outputType = Table.class)
@ValueDef(name = "monitorPoint", type = "NUMBER", required = true, info = "The Uset for monitor point")
@ValueDef(name = "monitorFile", info = "The outputfile for monitor points", def = "monitor.out")
@ValueDef(name = "monitorFile", info = "The outputfile for monitor points", def = "monitor.onComplete")
@ValueDef(name = "calculateRelative", info = "Calculate count rate relative to average monitor point", def = "false")
public class MonitorCorrectAction extends OneToOneAction<Table, Table> {

View File

@ -24,23 +24,21 @@ import hep.dataforge.io.XMLMetaWriter;
import hep.dataforge.io.reports.Reportable;
import hep.dataforge.meta.Laminate;
import hep.dataforge.meta.Meta;
import hep.dataforge.tables.DataPoint;
import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.MapPoint;
import hep.dataforge.tables.Table;
import hep.dataforge.tables.TableFormat;
import hep.dataforge.tables.*;
import inr.numass.storage.NMPoint;
import inr.numass.storage.NumassData;
import inr.numass.storage.RawNMPoint;
import inr.numass.utils.TritiumUtils;
import static inr.numass.utils.TritiumUtils.evaluateExpression;
import inr.numass.utils.UnderflowCorrection;
import java.io.OutputStream;
import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Function;
import static inr.numass.utils.TritiumUtils.evaluateExpression;
/**
*
* @author Darksnake
@ -143,7 +141,6 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
* @param log
* @param point
* @param meta
* @param countRate precalculated count rate in main window
* @return
*/
private double correction(Reportable log, NMPoint point, Laminate meta) {

View File

@ -16,11 +16,6 @@
package inr.numass.actions;
import hep.dataforge.actions.OneToOneAction;
import hep.dataforge.stat.fit.FitState;
import hep.dataforge.stat.fit.FitTaskResult;
import hep.dataforge.stat.fit.Param;
import hep.dataforge.stat.fit.ParamSet;
import hep.dataforge.stat.models.Histogram;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.io.ColumnedDataWriter;
import hep.dataforge.io.PrintFunction;
@ -34,6 +29,11 @@ import hep.dataforge.plots.PlotsPlugin;
import hep.dataforge.plots.XYPlotFrame;
import hep.dataforge.plots.data.PlottableData;
import hep.dataforge.plots.data.PlottableXYFunction;
import hep.dataforge.stat.fit.FitState;
import hep.dataforge.stat.fit.FitTaskResult;
import hep.dataforge.stat.fit.Param;
import hep.dataforge.stat.fit.ParamSet;
import hep.dataforge.stat.models.Histogram;
import hep.dataforge.stat.simulation.GaussianParameterGenerator;
import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.MapPoint;
@ -41,13 +41,8 @@ import hep.dataforge.tables.Table;
import hep.dataforge.tables.XYAdapter;
import hep.dataforge.values.NamedValueSet;
import inr.numass.NumassIntegrator;
import inr.numass.Numass;
import inr.numass.models.ExperimentalVariableLossSpectrum;
import inr.numass.models.LossCalculator;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.nio.charset.Charset;
import java.util.Arrays;
import org.apache.commons.math3.analysis.UnivariateFunction;
import org.apache.commons.math3.analysis.interpolation.LinearInterpolator;
import org.apache.commons.math3.analysis.interpolation.UnivariateInterpolator;
@ -55,6 +50,11 @@ import org.apache.commons.math3.stat.StatUtils;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
import org.slf4j.LoggerFactory;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.nio.charset.Charset;
import java.util.Arrays;
/**
*
* @author darksnake
@ -65,6 +65,55 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
private static final String[] names = {"X", "exPos", "ionPos", "exW", "ionW", "exIonRatio"};
public static double calcultateIonRatio(NamedValueSet set, double threshold) {
UnivariateIntegrator integrator = NumassIntegrator.getHighDensityIntegrator();
UnivariateFunction integrand = LossCalculator.getSingleScatterFunction(set);
return 1d - integrator.integrate(integrand, 5d, threshold);
}
public static Table generateSpread(PrintWriter writer, String name, NamedValueSet parameters, NamedMatrix covariance) {
int numCalls = 1000;
int gridPoints = 200;
double a = 8;
double b = 32;
double[] grid = GridCalculator.getUniformUnivariateGrid(a, b, gridPoints);
double[] upper = new double[gridPoints];
double[] lower = new double[gridPoints];
double[] dispersion = new double[gridPoints];
double[] central = new double[gridPoints];
UnivariateFunction func = LossCalculator.getSingleScatterFunction(parameters);
for (int j = 0; j < gridPoints; j++) {
central[j] = func.value(grid[j]);
}
Arrays.fill(upper, Double.NEGATIVE_INFINITY);
Arrays.fill(lower, Double.POSITIVE_INFINITY);
Arrays.fill(dispersion, 0);
GaussianParameterGenerator generator = new GaussianParameterGenerator(parameters, covariance);
for (int i = 0; i < numCalls; i++) {
func = LossCalculator.getSingleScatterFunction(generator.generate());
for (int j = 0; j < gridPoints; j++) {
double val = func.value(grid[j]);
upper[j] = Math.max(upper[j], val);
lower[j] = Math.min(lower[j], val);
dispersion[j] += (val - central[j]) * (val - central[j]) / numCalls;
}
}
String[] pointNames = {"e", "central", "lower", "upper", "dispersion"};
ListTable.Builder res = new ListTable.Builder(pointNames);
for (int i = 0; i < gridPoints; i++) {
res.row(new MapPoint(pointNames, grid[i], central[i], lower[i], upper[i], dispersion[i]));
}
return res.build();
}
@Override
protected FitState execute(Reportable log, String name, Laminate meta, FitState input) {
ParamSet pars = input.getParameters();
@ -115,7 +164,7 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
// writer.println(param.toString());
// }
// writer.println();
// out.printf("Chi squared over degrees of freedom: %g/%d = %g", input.getChi2(), input.ndf(), chi2 / this.ndf());
// onComplete.printf("Chi squared over degrees of freedom: %g/%d = %g", input.getChi2(), input.ndf(), chi2 / this.ndf());
writer.println();
@ -178,12 +227,6 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
return input;
}
public static double calcultateIonRatio(NamedValueSet set, double threshold) {
UnivariateIntegrator integrator = NumassIntegrator.getHighDensityIntegrator();
UnivariateFunction integrand = LossCalculator.getSingleScatterFunction(set);
return 1d - integrator.integrate(integrand, 5d, threshold);
}
private double calculateIntegralExIonRatio(Table data, double X, double integralThreshold) {
double scatterProb = 1 - Math.exp(-X);
@ -233,47 +276,4 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
return new DescriptiveStatistics(res).getStandardDeviation();
}
public static Table generateSpread(PrintWriter writer, String name, NamedValueSet parameters, NamedMatrix covariance) {
int numCalls = 1000;
int gridPoints = 200;
double a = 8;
double b = 32;
double[] grid = GridCalculator.getUniformUnivariateGrid(a, b, gridPoints);
double[] upper = new double[gridPoints];
double[] lower = new double[gridPoints];
double[] dispersion = new double[gridPoints];
double[] central = new double[gridPoints];
UnivariateFunction func = LossCalculator.getSingleScatterFunction(parameters);
for (int j = 0; j < gridPoints; j++) {
central[j] = func.value(grid[j]);
}
Arrays.fill(upper, Double.NEGATIVE_INFINITY);
Arrays.fill(lower, Double.POSITIVE_INFINITY);
Arrays.fill(dispersion, 0);
GaussianParameterGenerator generator = new GaussianParameterGenerator(parameters, covariance);
for (int i = 0; i < numCalls; i++) {
func = LossCalculator.getSingleScatterFunction(generator.generate());
for (int j = 0; j < gridPoints; j++) {
double val = func.value(grid[j]);
upper[j] = Math.max(upper[j], val);
lower[j] = Math.min(lower[j], val);
dispersion[j] += (val - central[j]) * (val - central[j]) / numCalls;
}
}
String[] pointNames = {"e", "central", "lower", "upper", "dispersion"};
ListTable.Builder res = new ListTable.Builder(pointNames);
for (int i = 0; i < gridPoints; i++) {
res.row(new MapPoint(pointNames, grid[i], central[i], lower[i], upper[i], dispersion[i]));
}
return res.build();
}
}

View File

@ -0,0 +1,16 @@
package inr.numass.tasks;
import hep.dataforge.grind.JavaGrindLauncher;
import java.io.File;
/**
* Created by darksnake on 12-Aug-16.
*/
public class GrindCaller {
public static void main(String[] args) throws Exception {
JavaGrindLauncher.buildWorkspace(new File("D:\\Work\\Numass\\sterile2016\\workspace.groovy")).runTask("numass.prepare", "fill_2").computeAll();
}
}

View File

@ -8,7 +8,6 @@ package inr.numass.tasks;
import hep.dataforge.actions.ManyToOneAction;
import hep.dataforge.computation.WorkManager;
import hep.dataforge.context.Context;
import hep.dataforge.data.Data;
import hep.dataforge.data.DataNode;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.io.reports.Reportable;
@ -20,6 +19,7 @@ import hep.dataforge.tables.Table;
import hep.dataforge.workspace.GenericTask;
import hep.dataforge.workspace.TaskModel;
import hep.dataforge.workspace.TaskState;
import java.util.Map;
/**
@ -29,8 +29,8 @@ import java.util.Map;
public class NumassFitScanSummaryTask extends GenericTask {
@Override
protected TaskState transform(WorkManager.Callback callback, Context context, TaskState state, Meta config) {
return state.finish(new FitSummaryAction().withContext(context).run((DataNode<FitState>) state.getData(), config));
protected void transform(WorkManager.Callback callback, Context context, TaskState state, Meta config) {
state.finish(new FitSummaryAction().withContext(context).run((DataNode<FitState>) state.getData(), config));
}
@Override

View File

@ -23,13 +23,12 @@ import hep.dataforge.workspace.TaskModel;
import hep.dataforge.workspace.TaskState;
/**
*
* @author Alexander Nozik
*/
public class NumassFitScanTask extends GenericTask {
@Override
protected TaskState transform(WorkManager.Callback callback, Context context, TaskState state, Meta config) {
protected void transform(WorkManager.Callback callback, Context context, TaskState state, Meta config) {
String scanParameter = config.getString("scanPar", "msterile2");
Value scanValues = config.getValue("scanValues", Value.of(new String[]{"0.5, 1, 1.5, 2, 2.5, 3"}));
Action action = new FitAction().withContext(context).withParentProcess(callback.workName());
@ -42,25 +41,24 @@ public class NumassFitScanTask extends GenericTask {
}
//do fit
sourceNode.forEachDataWithType(Table.class, (name, data) -> {
sourceNode.forEachDataWithType(Table.class, data -> {
DataNode res = scanValues.listValue().stream().parallel().map(val -> {
MetaBuilder overrideMeta = new MetaBuilder("override");
overrideMeta.setValue("@resultName", String.format("%s[%s=%s]", name, scanParameter, val.stringValue()));
MetaBuilder paramMeta = MetaUtils.findNodeByValue(config, "params.param", name, scanParameter).getBuilder()
overrideMeta.setValue("@resultName", String.format("%s[%s=%s]", data.getName(), scanParameter, val.stringValue()));
MetaBuilder paramMeta = MetaUtils.findNodeByValue(config, "params.param", data.getName(), scanParameter).getBuilder()
.setValue("value", val);
overrideMeta.setNode("params.param", paramMeta);
return action.run(DataNode.of(name, data, overrideMeta), config);
return action.run(DataNode.of(data.getName(), data, overrideMeta), config);
}).collect(
() -> DataSet.builder(FitState.class),
(DataSet.Builder builder, DataNode node) -> builder.putData(node.getName(), node.getData()),
(DataSet.Builder builder1, DataSet.Builder builder2) -> builder1.putAll(builder2.getDataMap())
).build();
resultBuilder.putData(name, res.getData());
resultBuilder.putData(data.getName(), res.getData());
});
state.finish(resultBuilder.build());
return state;
}
@Override

View File

@ -6,8 +6,8 @@
package inr.numass.tasks;
import hep.dataforge.actions.Action;
import hep.dataforge.context.Context;
import hep.dataforge.computation.WorkManager;
import hep.dataforge.context.Context;
import hep.dataforge.data.DataNode;
import hep.dataforge.data.DataTree;
import hep.dataforge.meta.Meta;
@ -47,7 +47,7 @@ public class NumassPrepareTask extends GenericTask {
*/
@Override
@SuppressWarnings("unchecked")
protected TaskState transform(WorkManager.Callback callback, Context context, TaskState state, Meta config) {
protected void transform(WorkManager.Callback callback, Context context, TaskState state, Meta config) {
//acquiring initial data. Data node could not be empty
Meta dataMeta = Template.compileTemplate(config.getNode("data"), config);
DataNode<NumassData> data = runAction(new ReadNumassStorageAction(), callback, context, DataNode.empty(), dataMeta);
@ -66,13 +66,13 @@ public class NumassPrepareTask extends GenericTask {
//merging if needed
if (config.hasNode("merge")) {
DataTree.Builder resultBuilder = DataTree.builder(Table.class);
tables.dataStream().forEach(pair -> resultBuilder.putData(pair.getKey(), pair.getValue()));
// tables.dataStream().forEach(d -> resultBuilder.putData(d));
DataNode<Table> finalTables = tables;
config.getNodes("merge").forEach(mergeNode -> {
Meta mergeMeta = Template.compileTemplate(mergeNode, config);
DataNode<Table> mergeData = runAction(new MergeDataAction(), callback, context, finalTables, mergeMeta);
mergeData.dataStream().forEach(pair -> {
resultBuilder.putData("merge." + pair.getKey(), pair.getValue());
mergeData.dataStream().forEach(d -> {
resultBuilder.putData("merge." + d.getName(), d.anonymize());
});
});
tables = resultBuilder.build();
@ -84,7 +84,6 @@ public class NumassPrepareTask extends GenericTask {
}
state.finish(tables);
return state;
}
private <T, R> DataNode<R> runAction(Action<T, R> action, WorkManager.Callback callback, Context context, DataNode<T> data, Meta meta) {

View File

@ -6,11 +6,12 @@
package inr.numass.workbench;
import hep.dataforge.fx.FXDataOutputPane;
import java.io.OutputStream;
import javafx.event.Event;
import java.io.OutputStream;
/**
* A text output tab. Basically it is attached to IOManager::out
 * A text output tab. Basically it is attached to IOManager::out
*
* @author Alexander Nozik <altavir@gmail.com>
*/
@ -27,7 +28,7 @@ public class TextOutputTab extends OutputTab {
*/
public TextOutputTab(String name) {
super(name);
// out = new DataOutputPane();
// out = new DataOutputPane();
out = new FXDataOutputPane();
setContent(out.getRoot());
setOnClosed((Event event) -> close());

View File

@ -8,10 +8,11 @@ package inr.numass.workbench;
import hep.dataforge.context.Context;
import hep.dataforge.io.IOManager;
import hep.dataforge.names.Name;
import org.apache.commons.io.output.TeeOutputStream;
import java.io.File;
import java.io.InputStream;
import java.io.OutputStream;
import org.apache.commons.io.output.TeeOutputStream;
/**
* An IOManager wrapper that redirects output to appropriate FX components
@ -32,6 +33,11 @@ public class WorkbenchIOManager implements IOManager {
return manager.getContext();
}
@Override
public void setContext(Context context) {
manager.setContext(context);
}
@Override
public File getFile(String path) {
return manager.getFile(path);
@ -63,12 +69,7 @@ public class WorkbenchIOManager implements IOManager {
@Override
public OutputStream out() {
return manager.out();
// return new ConsoleStream(holder.getLogArea(), new PrintStream(manager.out()));
}
@Override
public void setContext(Context context) {
manager.setContext(context);
// return new ConsoleStream(holder.getLogArea(), new PrintStream(manager.out()));
}
}

View File

@ -31,11 +31,11 @@ public class TransmissionInterpolatorTest {
JFreeChartFrame frame = FXPlotUtils.displayJFreeChart("TransmissionInterpolatorTest", null);
//JFreeChartFrame.drawFrame("TransmissionInterpolatorTest", null);
TransmissionInterpolator interpolator = TransmissionInterpolator.fromFile(GlobalContext.instance(),
"d:\\sterile-new\\loss2014-11\\.dataforge\\merge\\empty_sum.out", "Uset", "CR", 15, 0.8, 19002d);
"d:\\sterile-new\\loss2014-11\\.dataforge\\merge\\empty_sum.onComplete", "Uset", "CR", 15, 0.8, 19002d);
frame.add(PlottableData.plot("data", interpolator.getX(), interpolator.getY()));
frame.add(PlottableXYFunction.plotFunction("interpolated", x->interpolator.value(x), interpolator.getXmin(), interpolator.getXmax(), 2000));
// PrintFunction.printFuntionSimple(new PrintWriter(System.out), interpolator, interpolator.getXmin(), interpolator.getXmax(), 500);
// PrintFunction.printFuntionSimple(new PrintWriter(System.out), interpolator, interpolator.getXmin(), interpolator.getXmax(), 500);
}
}

View File

@ -20,37 +20,22 @@ package inr.numass.viewer;
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
import hep.dataforge.context.Context;
import hep.dataforge.computation.WorkManager;
import hep.dataforge.context.Context;
import hep.dataforge.io.ColumnedDataWriter;
import hep.dataforge.meta.Meta;
import hep.dataforge.meta.MetaBuilder;
import hep.dataforge.plots.PlotFrame;
import hep.dataforge.plots.XYPlotFrame;
import hep.dataforge.plots.data.ChangeablePlottableData;
import hep.dataforge.plots.data.DynamicPlottable;
import hep.dataforge.plots.data.DynamicPlottableSet;
import hep.dataforge.plots.data.PlotDataUtils;
import hep.dataforge.plots.data.PlottableData;
import hep.dataforge.plots.data.*;
import hep.dataforge.plots.fx.PlotContainer;
import hep.dataforge.plots.jfreechart.JFreeChartFrame;
import hep.dataforge.storage.commons.JSONMetaWriter;
import hep.dataforge.tables.DataPoint;
import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.MapPoint;
import hep.dataforge.tables.Table;
import hep.dataforge.tables.XYAdapter;
import hep.dataforge.tables.*;
import inr.numass.storage.NMPoint;
import inr.numass.storage.NumassData;
import inr.numass.utils.TritiumUtils;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.List;
import java.util.ResourceBundle;
import java.util.function.Predicate;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import javafx.application.Platform;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
@ -61,14 +46,7 @@ import javafx.fxml.FXMLLoader;
import javafx.fxml.Initializable;
import javafx.geometry.Insets;
import javafx.geometry.Orientation;
import javafx.scene.control.Button;
import javafx.scene.control.CheckBox;
import javafx.scene.control.ChoiceBox;
import javafx.scene.control.Label;
import javafx.scene.control.Separator;
import javafx.scene.control.Tab;
import javafx.scene.control.TextArea;
import javafx.scene.control.TextField;
import javafx.scene.control.*;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.VBox;
import javafx.stage.FileChooser;
@ -80,6 +58,15 @@ import org.controlsfx.validation.Validator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.List;
import java.util.ResourceBundle;
import java.util.function.Predicate;
import java.util.function.Supplier;
import java.util.stream.Collectors;
/**
* FXML Controller class
*
@ -387,7 +374,7 @@ public class NumassLoaderViewComponent extends AnchorPane implements Initializab
if (points != null && !points.isEmpty()) {
FileChooser fileChooser = new FileChooser();
fileChooser.setTitle("Choose text export destination");
fileChooser.setInitialFileName(data.getName() + "_spectrum.out");
fileChooser.setInitialFileName(data.getName() + "_spectrum.out");
File destination = fileChooser.showSaveDialog(spectrumPlotPane.getScene().getWindow());
if (destination != null) {
String[] names = new String[]{"Uset", "Uread", "Length", "Total", "Window", "CR", "CRerr", "Timestamp"};
@ -428,7 +415,7 @@ public class NumassLoaderViewComponent extends AnchorPane implements Initializab
private void onExportButtonClick(ActionEvent event) {
FileChooser fileChooser = new FileChooser();
fileChooser.setTitle("Choose text export destination");
fileChooser.setInitialFileName(data.getName() + "_detector.out");
fileChooser.setInitialFileName(data.getName() + "_detector.out");
File destination = fileChooser.showSaveDialog(detectorPlotPane.getScene().getWindow());
if (destination != null) {
Table detectorData = PlotDataUtils.collectXYDataFromPlot((XYPlotFrame) detectorPlot.getPlot(), true);