Minor functions adjustment
parent b6af0f612c
commit 19d2f95121
@@ -26,7 +26,6 @@ import hep.dataforge.values.Value;
 import hep.dataforge.values.Values;

 /**
- *
  * @author Darksnake
  */
 public class SpectrumDataAdapter extends XYAdapter {
@@ -65,13 +64,13 @@ public class SpectrumDataAdapter extends XYAdapter {

     public Values buildSpectrumDataPoint(double x, long count, double t) {
         return ValueMap.of(new String[]{nameFor(X_VALUE_KEY), nameFor(Y_VALUE_KEY),
                         nameFor(POINT_LENGTH_NAME)},
                 x, count, t);
     }

     public Values buildSpectrumDataPoint(double x, long count, double countErr, double t) {
         return ValueMap.of(new String[]{nameFor(X_VALUE_KEY), nameFor(Y_VALUE_KEY),
                         nameFor(Y_ERROR_KEY), nameFor(POINT_LENGTH_NAME)},
                 x, count, countErr, t);
     }

@@ -86,8 +85,11 @@ public class SpectrumDataAdapter extends XYAdapter {
             return Value.of(super.getYerr(point).doubleValue() / getTime(point));
         } else {
             double y = super.getY(point).doubleValue();
-            if (y <= 0) {
+            if (y < 0) {
                 throw new DataFormatException();
+            } else if (y == 0) {
+                //avoid infinite weights
+                return Value.of(1d / getTime(point));
             } else {
                 return Value.of(Math.sqrt(y) / getTime(point));
             }
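Note on the change above: for y registered counts over an acquisition time t, the Poisson error of the count rate is sqrt(y)/t, which vanishes at y = 0 and would give such a point an infinite statistical weight in a fit; the new branch substitutes the one-count error 1/t instead, and only genuinely negative counts are treated as a format error. A standalone sketch of the same rule (an illustrative helper, not the adapter's actual API):

import kotlin.math.sqrt

// Poisson error of a count rate n/t; falls back to 1/t at n == 0
// so that zero-count points keep a finite statistical weight.
fun countRateError(counts: Long, time: Double): Double {
    require(counts >= 0) { "negative count numbers are not allowed" }
    return if (counts == 0L) 1.0 / time else sqrt(counts.toDouble()) / time
}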
@@ -3,20 +3,24 @@ package inr.numass.scripts.models
 import hep.dataforge.context.Context
 import hep.dataforge.context.Global
 import hep.dataforge.fx.plots.PlotManager
-import hep.dataforge.grind.Grind
 import hep.dataforge.grind.GrindShell
 import hep.dataforge.grind.helpers.PlotHelper
 import hep.dataforge.meta.Meta
+import hep.dataforge.stat.fit.FitManager
+import hep.dataforge.stat.fit.FitStage
+import hep.dataforge.stat.fit.FitState
 import hep.dataforge.stat.fit.ParamSet
 import hep.dataforge.stat.models.XYModel
 import hep.dataforge.stat.parametric.ParametricFunction
+import hep.dataforge.tables.Table
 import inr.numass.NumassPlugin
+import inr.numass.data.SpectrumDataAdapter
 import inr.numass.data.SpectrumGenerator
-import inr.numass.models.GaussResolution
 import inr.numass.models.NBkgSpectrum
+import inr.numass.models.NumassModelsKt
+import inr.numass.models.misc.GaussFunction
 import inr.numass.models.sterile.NumassBeta
-import inr.numass.models.sterile.NumassTransmission
-import inr.numass.models.sterile.SterileNeutrinoSpectrum
+import inr.numass.utils.DataModelUtils

 import static hep.dataforge.grind.Grind.morph

@@ -25,18 +29,14 @@ ctx.getPluginManager().load(PlotManager)
 ctx.getPluginManager().load(NumassPlugin)

 new GrindShell(ctx).eval {
-    ParametricFunction spectrum = new SterileNeutrinoSpectrum(
-            ctx,
-            Grind.buildMeta(useFSS: false),
-            new NumassBeta(),
-            new NumassTransmission(ctx, Meta.empty()),
-            new GaussResolution(5d)
-    )
+    def beta = new NumassBeta().getSpectrum(0)
+    def response = new GaussFunction(4.0)
+    ParametricFunction spectrum = NumassModelsKt.convolute(beta, response)

-    def model = new XYModel(Meta.empty(), new NBkgSpectrum(spectrum));
+    def model = new XYModel(Meta.empty(), new SpectrumDataAdapter(), new NBkgSpectrum(spectrum));

     ParamSet params = morph(ParamSet, [:], "params") {
-        N(value: 1e+04, err: 30, lower: 0)
+        N(value: 1e+12, err: 30, lower: 0)
         bkg(value: 1.0, err: 0.1)
         E0(value: 18575.0, err: 0.1)
         mnu2(value: 0, err: 0.01)
@@ -44,26 +44,30 @@ new GrindShell(ctx).eval {
         U2(value: 0.0, err: 1e-3)
         X(value: 0.0, err: 0.01, lower: 0)
         trap(value: 1.0, err: 0.05)
-        w(300, err: 5)
+        w(value: 150, err: 5)
     }

     SpectrumGenerator generator = new SpectrumGenerator(model, params, 12316);

     PlotHelper ph = plots

-    ph.plot((10000..18500).step(100).collectEntries {
-        [it,model.value(it,params)]
-    })
+    ph.plot((2000..19500).step(100).collectEntries {
+        [it, model.value(it, params)]
+    }, "spectrum").configure(showLine: true, showSymbol: false, showErrors: false, thickness: 3, color: "red")

-//    def data = generator.generateData(DataModelUtils.getUniformSpectrumConfiguration(10000, 18500, 10*24*3600, 850));
-//
-//    FitState state = new FitState(data, model, params);
-//
-//    def fm = ctx.getFeature(FitManager)
-//
-//    def res = fm.runStage(state, "QOW", FitStage.TASK_RUN, "N", "bkg", "E0", "U2");
-//
-//
-//    res.print(ctx.io.out());
+    Table data = generator.generateData(DataModelUtils.getUniformSpectrumConfiguration(10000, 19500, 1, 950));
+
+    //params.setParValue("w", 151)
+
+    ph.plot(data).configure(color: "blue")
+
+    FitState state = new FitState(data, model, params);
+
+    def fm = ctx.getFeature(FitManager)
+
+    def res = fm.runStage(state, "QOW", FitStage.TASK_RUN, "N", "bkg", "E0", "U2");
+
+
+    res.printState(ctx.io.out().newPrintWriter());
 }
@@ -134,7 +134,7 @@ public class Main {
         if (!outDir.exists()) {
             outDir.mkdirs();
         }
-        context.putValue(NumassIO.NUMASS_OUTPUT_CONTEXT_KEY, outDir.toString());
+        context.putValue(NumassIO.Companion.getNUMASS_OUTPUT_CONTEXT_KEY(), outDir.toString());
     }
 }

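The new call site reflects that NUMASS_OUTPUT_CONTEXT_KEY now lives in the companion object of the Kotlin NumassIO added later in this commit: a plain `val` in a companion object is visible to Java only through the generated NumassIO.Companion.getNUMASS_OUTPUT_CONTEXT_KEY() accessor. As a side note, declaring the key as `const val NUMASS_OUTPUT_CONTEXT_KEY = "numass.outputDir"` would expose it to Java as a static field and keep the original NumassIO.NUMASS_OUTPUT_CONTEXT_KEY syntax; this commit keeps the plain `val`.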
@@ -1,137 +0,0 @@
-/*
- * Copyright 2015 Alexander Nozik.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package inr.numass;
-
-import ch.qos.logback.classic.LoggerContext;
-import ch.qos.logback.classic.encoder.PatternLayoutEncoder;
-import ch.qos.logback.classic.spi.ILoggingEvent;
-import ch.qos.logback.core.Appender;
-import ch.qos.logback.core.FileAppender;
-import hep.dataforge.context.Context;
-import hep.dataforge.io.BasicIOManager;
-import hep.dataforge.names.Name;
-import hep.dataforge.utils.ReferenceRegistry;
-import org.apache.commons.io.output.TeeOutputStream;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-/**
- * @author Darksnake
- */
-public class NumassIO extends BasicIOManager {
-
-    public static final String NUMASS_OUTPUT_CONTEXT_KEY = "numass.outputDir";
-
-    ReferenceRegistry<OutputStream> registry = new ReferenceRegistry<>();
-//    FileAppender<ILoggingEvent> appender;
-
-
-    @Override
-    public void attach(Context context) {
-        super.attach(context);
-    }
-
-    @Override
-    public Appender<ILoggingEvent> createLoggerAppender() {
-        LoggerContext lc = (LoggerContext) LoggerFactory.getILoggerFactory();
-        PatternLayoutEncoder ple = new PatternLayoutEncoder();
-
-        ple.setPattern("%date %level [%thread] %logger{10} [%file:%line] %msg%n");
-        ple.setContext(lc);
-        ple.start();
-        FileAppender<ILoggingEvent> appender = new FileAppender<>();
-        appender.setFile(new File(getWorkDirectory().toFile(), getMeta().getString("logFileName", "numass.log")).toString());
-        appender.setEncoder(ple);
-        return appender;
-    }
-
-    @Override
-    public void detach() {
-        super.detach();
-        registry.forEach(it -> {
-            try {
-                it.close();
-            } catch (IOException e) {
-                LoggerFactory.getLogger(getClass()).error("Failed to close output", e);
-            }
-        });
-    }
-
-    public String getExtension(String type) {
-        switch (type){
-            case DEFAULT_OUTPUT_TYPE:
-                return ".out";
-            default:
-                return "." + type;
-        }
-    }
-
-    @Override
-    public OutputStream out(Name stage, Name name, String type) {
-        List<String> tokens = new ArrayList<>();
-        if (getContext().hasValue("numass.path")) {
-            String path = getContext().getString("numass.path");
-            if (path.contains(".")) {
-                tokens.addAll(Arrays.asList(path.split(".")));
-            } else {
-                tokens.add(path);
-            }
-        }
-
-        if (stage != null && stage.getLength() != 0) {
-            tokens.addAll(Arrays.asList(stage.asArray()));
-        }
-
-        String dirName = String.join(File.separator, tokens);
-        String fileName = name.toString() + getExtension(type);
-        OutputStream out = buildOut(getWorkDirectory(), dirName, fileName);
-        registry.add(out);
-        return out;
-    }
-
-    private OutputStream buildOut(Path parentDir, String dirName, String fileName) {
-        Path outputFile;
-
-        if (!Files.exists(parentDir)) {
-            throw new RuntimeException("Working directory does not exist");
-        }
-        try {
-            if (dirName != null && !dirName.isEmpty()) {
-                parentDir = parentDir.resolve(dirName);
-                Files.createDirectories(parentDir);
-            }
-
-//            String output = source.meta().getString("output", this.meta().getString("output", fileName + ".onComplete"));
-            outputFile = parentDir.resolve(fileName);
-
-            if (getContext().getBoolean("numass.consoleOutput", false)) {
-                return new TeeOutputStream(Files.newOutputStream(outputFile), System.out);
-            } else {
-                return Files.newOutputStream(outputFile);
-            }
-        } catch (IOException ex) {
-            throw new RuntimeException(ex);
-        }
-    }
-}
@@ -1,159 +0,0 @@
-/*
- * Copyright 2015 Alexander Nozik.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package inr.numass.models;
-
-import hep.dataforge.stat.parametric.AbstractParametricBiFunction;
-import hep.dataforge.values.ValueProvider;
-import hep.dataforge.values.Values;
-
-import static java.lang.Math.*;
-
-/**
- * @author Darksnake
- */
-public class GaussResolution extends AbstractParametricBiFunction {
-
-    private static final String[] list = {"w", "shift"};
-
-    private double cutoff = 4d;
-//    private final UnivariateIntegrator integrator = new SimpsonIntegrator(1e-3, Double.MAX_VALUE, 3, 64);
-
-    /**
-     * @param cutoff - the distance from the mean, in sigmas, at which
-     * the function is considered to vanish
-     */
-    public GaussResolution(double cutoff) {
-        super(list);
-        this.cutoff = cutoff;
-    }
-
-//    @Override
-//    public ParametricFunction getConvolutedSpectrum(final RangedNamedSetSpectrum bare) {
-//        return new AbstractParametricFunction(combineNamesWithEquals(this.namesAsArray(), bare.namesAsArray())) {
-//            int maxEval = Global.instance().getInt("INTEGR_POINTS", 500);
-//
-//            @Override
-//            public double derivValue(String parName, double x, Values set) {
-//                double a = getLowerBound(set);
-//                double b = getUpperBound(set);
-//                assert b > a;
-//                return integrator.integrate(maxEval, getDerivProduct(parName, bare, set, x), a, b);
-//            }
-//
-//            @Override
-//            public boolean providesDeriv(String name) {
-//                return "w".equals(name) || bare.providesDeriv(name);
-//            }
-//
-//            @Override
-//            public double value(double x, Values set) {
-//                double a = getLowerBound(set);
-//                double b = getUpperBound(set);
-//                assert b > a;
-//                return integrator.integrate(maxEval, getProduct(bare, set, x), a, b);
-//            }
-//        };
-//    }
-
-//    @Override
-//    public double getDeriv(String name, Values set, double input, double output) {
-//        return this.derivValue(name, output - input, set);
-//    }
-
-//    private UnivariateFunction getDerivProduct(final String name, final ParametricFunction bare, final Values pars, final double x0) {
-//        return (double x) -> {
-//            double res1;
-//            double res2;
-//            try {
-//                res1 = bare.derivValue(name, x0 - x, pars) * GaussResolution.this.value(x, pars);
-//            } catch (NameNotFoundException ex1) {
-//                res1 = 0;
-//            }
-//            try {
-//                res2 = bare.value(x0 - x, pars) * GaussResolution.this.derivValue(name, x, pars);
-//            } catch (NameNotFoundException ex2) {
-//                res2 = 0;
-//            }
-//            return res1 + res2;
-//        };
-//    }
-
-//    private double getLowerBound(final Values pars) {
-//        return getPos(pars) - cutoff * getW(pars);
-//    }
-
-
-
-//    private UnivariateFunction getProduct(final ParametricFunction bare, final Values pars, final double x0) {
-//        return (double x) -> {
-//            double res = bare.value(x0 - x, pars) * GaussResolution.this.value(x, pars);
-//            assert !isNaN(res);
-//            return res;
-//        };
-//    }
-
-//    private double getUpperBound(final Values pars) {
-//        return getPos(pars) + cutoff * getW(pars);
-//    }
-
-//    @Override
-//    public double getValue(Values set, double input, double output) {
-//        return this.value(output - input, set);
-//    }
-
-    private double getPos(ValueProvider pars) {
-        return pars.getDouble("shift", 0);
-    }
-
-    private double getW(ValueProvider pars) {
-        return pars.getDouble("w");
-    }
-
-    @Override
-    public boolean providesDeriv(String name) {
-        return true;
-    }
-
-
-    @Override
-    public double value(double x, double y, Values pars) {
-        double d = x - y;
-        if (abs(d - getPos(pars)) > cutoff * getW(pars)) {
-            return 0;
-        }
-        double aux = (d - getPos(pars)) / getW(pars);
-        return exp(-aux * aux / 2) / getW(pars) / sqrt(2 * Math.PI);
-    }
-
-    @Override
-    public double derivValue(String parName, double x, double y, Values pars) {
-        double d = x - y;
-        if (abs(d - getPos(pars)) > cutoff * getW(pars)) {
-            return 0;
-        }
-        double pos = getPos(pars);
-        double w = getW(pars);
-
-        switch (parName) {
-            case "shift":
-                return this.value(x, y, pars) * (d - pos) / w / w;
-            case "w":
-                return this.value(x, y, pars) * ((d - pos) * (d - pos) / w / w / w - 1 / w);
-            default:
-                return super.derivValue(parName, x, y, pars);
-        }
-    }
-}
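The only live logic in the removed class is the truncated Gaussian kernel in value and derivValue: for d = x − y it returns exp(−((d − shift)/w)² / 2) / (w·sqrt(2π)) when |d − shift| ≤ cutoff·w and 0 otherwise, with the standard analytic derivatives value·(d − shift)/w² with respect to shift and value·((d − shift)²/w³ − 1/w) with respect to w. The new GaussFunction.kt added further down in this commit carries the same kernel over as a function of the single argument d, so it can be paired with the generic convolution helper introduced below.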
@@ -32,7 +32,7 @@ public class NBkgSpectrum extends AbstractParametricFunction {
     private static final String[] list = {"N", "bkg"};

     public MultiCounter counter = new MultiCounter(this.getClass().getName());
-    ParametricFunction source;
+    private final ParametricFunction source;

     public NBkgSpectrum(ParametricFunction source) {
         super(combineNamesWithEquals(source.namesAsArray(), list));
@@ -22,8 +22,8 @@ import hep.dataforge.values.Values;
  * @author Darksnake
  */
 public class SimpleRange implements SpectrumRange{
-    Double min;
-    Double max;
+    private final Double min;
+    private final Double max;

     public SimpleRange(Double min, Double max) {
         if(min>=max){
numass-main/src/main/kotlin/inr/numass/NumassIO.kt
@@ -0,0 +1,136 @@
+/*
+ * Copyright 2015 Alexander Nozik.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package inr.numass
+
+import ch.qos.logback.classic.LoggerContext
+import ch.qos.logback.classic.encoder.PatternLayoutEncoder
+import ch.qos.logback.classic.spi.ILoggingEvent
+import ch.qos.logback.core.Appender
+import ch.qos.logback.core.FileAppender
+import hep.dataforge.context.Context
+import hep.dataforge.io.BasicIOManager
+import hep.dataforge.io.IOManager
+import hep.dataforge.names.Name
+import hep.dataforge.utils.ReferenceRegistry
+import org.apache.commons.io.output.TeeOutputStream
+import org.slf4j.LoggerFactory
+import java.io.File
+import java.io.IOException
+import java.io.OutputStream
+import java.nio.file.Files
+import java.nio.file.Path
+import java.util.*
+
+/**
+ * @author Darksnake
+ */
+class NumassIO : BasicIOManager() {
+
+    internal var registry = ReferenceRegistry<OutputStream>()
+    // FileAppender<ILoggingEvent> appender;
+
+
+    override fun attach(context: Context) {
+        super.attach(context)
+    }
+
+    override fun createLoggerAppender(): Appender<ILoggingEvent> {
+        val lc = LoggerFactory.getILoggerFactory() as LoggerContext
+        val ple = PatternLayoutEncoder()
+
+        ple.pattern = "%date %level [%thread] %logger{10} [%file:%line] %msg%n"
+        ple.context = lc
+        ple.start()
+        val appender = FileAppender<ILoggingEvent>()
+        appender.file = File(workDirectory.toFile(), meta.getString("logFileName", "numass.log")).toString()
+        appender.encoder = ple
+        return appender
+    }
+
+    override fun detach() {
+        super.detach()
+        registry.forEach { it ->
+            try {
+                it.close()
+            } catch (e: IOException) {
+                LoggerFactory.getLogger(javaClass).error("Failed to close output", e)
+            }
+        }
+    }
+
+    private fun getExtension(type: String): String {
+        return when (type) {
+            IOManager.DEFAULT_OUTPUT_TYPE -> ".out"
+            else -> "." + type
+        }
+    }
+
+    override fun out(stage: Name?, name: Name, type: String): OutputStream {
+        val tokens = ArrayList<String>()
+        if (context.hasValue("numass.path")) {
+            val path = context.getString("numass.path")
+            if (path.contains(".")) {
+                tokens.addAll(Arrays.asList(*path.split(".".toRegex()).dropLastWhile { it.isEmpty() }.toTypedArray()))
+            } else {
+                tokens.add(path)
+            }
+        }
+
+        if (stage != null && stage.length != 0) {
+            tokens.addAll(Arrays.asList(*stage.asArray()))
+        }
+
+        val dirName = tokens.joinToString(File.separator)
+        val fileName = name.toString() + getExtension(type)
+        val out = buildOut(workDirectory, dirName, fileName)
+        registry.add(out)
+        return out
+    }
+
+    private fun buildOut(parentDir: Path, dirName: String?, fileName: String): OutputStream {
+        val outputFile: Path
+
+        if (!Files.exists(parentDir)) {
+            throw RuntimeException("Working directory does not exist")
+        }
+        try {
+            val dir = if (dirName.isNullOrEmpty()) {
+                parentDir
+            } else {
+                parentDir.resolve(dirName).also {
+                    Files.createDirectories(it)
+                }
+            }
+
+            // String output = source.meta().getString("output", this.meta().getString("output", fileName + ".onComplete"));
+            outputFile = dir.resolve(fileName)
+
+            return if (context.getBoolean("numass.consoleOutput", false)!!) {
+                TeeOutputStream(Files.newOutputStream(outputFile), System.out)
+            } else {
+                Files.newOutputStream(outputFile)
+            }
+        } catch (ex: IOException) {
+            throw RuntimeException(ex)
+        }
+
+    }
+
+    companion object {
+
+        val NUMASS_OUTPUT_CONTEXT_KEY = "numass.outputDir"
+    }
+}
@@ -9,6 +9,7 @@ import hep.dataforge.stat.parametric.AbstractParametricFunction
 import hep.dataforge.stat.parametric.ParametricFunction
 import hep.dataforge.utils.ContextMetaFactory
 import hep.dataforge.values.Values
+import inr.numass.models.misc.FunctionSupport
 import inr.numass.utils.NumassIntegrator
 import java.util.stream.Stream

@@ -103,17 +104,29 @@ operator fun ParametricFunction.times(num: Number): ParametricFunction {

 /**
  * Calculate convolution of two parametric functions
+ * @param func the function with which this function should be convoluded
+ * @param integrator optional integrator to be used in convolution
+ * @param support a function defining borders for integration. It takes 3 parameter: set of parameters,
+ * name of the derivative (empty for value) and point in which convolution should be calculated.
  */
-fun ParametricFunction.convolute(func: ParametricFunction,
-                                 integrator: UnivariateIntegrator<*> = NumassIntegrator.getDefaultIntegrator()): ParametricFunction {
+fun ParametricFunction.convolute(
+        func: ParametricFunction,
+        integrator: UnivariateIntegrator<*> = NumassIntegrator.getDefaultIntegrator(),
+        support: Values.(String, Double) -> Pair<Double, Double>
+): ParametricFunction {
     val mergedNames = Names.of(Stream.concat(names.stream(), func.names.stream()).distinct())
-    return object : AbstractParametricFunction(mergedNames){
+    return object : AbstractParametricFunction(mergedNames) {
         override fun derivValue(parName: String, x: Double, set: Values): Double {
-            TODO("not implemented") //To change body of created functions use File | Settings | File Templates.
+            val (a, b) = set.support(parName, x)
+            return integrator.integrate(a, b) { y: Double ->
+                this@convolute.derivValue(parName, y, set) * func.value(x - y, set) +
+                        this@convolute.value(y, set) * func.derivValue(parName, x - y, set)
+            }
         }

         override fun value(x: Double, set: Values): Double {
-            TODO("not implemented") //To change body of created functions use File | Settings | File Templates.
+            val (a, b) = set.support("", x)
+            return integrator.integrate(a, b) { y: Double -> this@convolute.value(y, set) * func.value(x - y, set) }
         }

         override fun providesDeriv(name: String?): Boolean {
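The derivValue body above implements differentiation of a convolution under the integral sign: d/dp ∫ f(y; p)·g(x − y; p) dy = ∫ [ ∂f/∂p(y; p)·g(x − y; p) + f(y; p)·∂g/∂p(x − y; p) ] dy, where f is the receiver function and g is func. The integration limits (a, b) are supplied per point by the new support callback, which is what keeps the integral finite for kernels with bounded support such as the truncated Gaussian response.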
@@ -122,4 +135,19 @@ fun ParametricFunction.convolute(func: ParametricFunction,

         }

+    }
+
+@JvmOverloads
+fun <T> ParametricFunction.convolute(
+        func: T,
+        integrator: UnivariateIntegrator<*> = NumassIntegrator.getDefaultIntegrator()
+): ParametricFunction where T : ParametricFunction, T : FunctionSupport {
+    //inverted order for correct boundaries
+    return func.convolute(this, integrator) { parName, x ->
+        if (parName.isEmpty()) {
+            func.getSupport(this)
+        } else {
+            func.getDerivSupport(parName, this)
+        }
+    }
 }
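For context, the Groovy script earlier in this commit drives exactly this overload through NumassModelsKt.convolute(beta, response). A rough Kotlin equivalent, assuming the numass classes shown in this commit are on the classpath (package names inferred from the script's imports; this is a sketch, not code from the commit):

import hep.dataforge.meta.Meta
import hep.dataforge.stat.models.XYModel
import inr.numass.data.SpectrumDataAdapter
import inr.numass.models.NBkgSpectrum
import inr.numass.models.convolute
import inr.numass.models.misc.GaussFunction
import inr.numass.models.sterile.NumassBeta

fun main() {
    // bare beta spectrum convoluted with a Gaussian response; the integration
    // bounds come from GaussFunction through the FunctionSupport interface
    val beta = NumassBeta().getSpectrum(0)
    val spectrum = beta.convolute(GaussFunction(4.0))
    val model = XYModel(Meta.empty(), SpectrumDataAdapter(), NBkgSpectrum(spectrum))
    println(model)
}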
@@ -0,0 +1,28 @@
+package inr.numass.models.misc
+
+import hep.dataforge.maths.HyperSquareDomain
+import hep.dataforge.values.Values
+
+interface FunctionSupport {
+    /**
+     * Get support for function itself
+     */
+    fun getSupport(params: Values): Pair<Double, Double>
+
+    /**
+     * GetSupport for function derivative
+     */
+    fun getDerivSupport(parName: String, params: Values): Pair<Double, Double>
+}
+
+interface BiFunctionSupport {
+    /**
+     * Get support for function itself
+     */
+    fun getSupport(x: Double, y: Double, params: Values): HyperSquareDomain
+
+    /**
+     * GetSupport for function derivative
+     */
+    fun getDerivSupport(parName: String, x: Double, params: Values): HyperSquareDomain
+}
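A minimal illustration of the new contract (hypothetical, not part of the commit): a kernel supported on a symmetric window whose half-width is read from a parameter "a" could implement it like this; the GaussFunction added next implements the same contract with bounds at cutoff sigmas.

import hep.dataforge.values.Values

// hypothetical example: rectangular window of half-width `a` around zero
class BoxSupport : FunctionSupport {
    override fun getSupport(params: Values): Pair<Double, Double> {
        val a = params.getDouble("a")
        return Pair(-a, a)
    }

    // derivatives with respect to the parameters live on the same interval
    override fun getDerivSupport(parName: String, params: Values): Pair<Double, Double> = getSupport(params)
}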
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2015 Alexander Nozik.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package inr.numass.models.misc
+
+import hep.dataforge.stat.parametric.AbstractParametricFunction
+import hep.dataforge.values.ValueProvider
+import hep.dataforge.values.Values
+
+import java.lang.Math.*
+
+/**
+ * @author Darksnake
+ */
+class GaussFunction(private val cutoff: Double = 4.0) : AbstractParametricFunction(list), FunctionSupport {
+
+    private fun getShift(pars: ValueProvider): Double {
+        return pars.getDouble("shift", 0.0)
+    }
+
+    private fun getW(pars: ValueProvider): Double {
+        return pars.getDouble("w")
+    }
+
+    override fun providesDeriv(name: String): Boolean {
+        return true
+    }
+
+
+    override fun value(d: Double, pars: Values): Double {
+        if (abs(d - getShift(pars)) > cutoff * getW(pars)) {
+            return 0.0
+        }
+        val aux = (d - getShift(pars)) / getW(pars)
+        return exp(-aux * aux / 2) / getW(pars) / sqrt(2 * Math.PI)
+    }
+
+    override fun derivValue(parName: String, d: Double, pars: Values): Double {
+        if (abs(d - getShift(pars)) > cutoff * getW(pars)) {
+            return 0.0
+        }
+        val pos = getShift(pars)
+        val w = getW(pars)
+
+        return when (parName) {
+            "shift" -> this.value(d, pars) * (d - pos) / w / w
+            "w" -> this.value(d, pars) * ((d - pos) * (d - pos) / w / w / w - 1 / w)
+            else -> return 0.0;
+        }
+    }
+
+    override fun getSupport(params: Values): Pair<Double, Double> {
+        val shift = getShift(params)
+        val w = getW(params)
+        return Pair(shift - cutoff * w, shift + cutoff * w)
+    }
+
+    override fun getDerivSupport(parName: String, params: Values): Pair<Double, Double> {
+        val shift = getShift(params)
+        val w = getW(params)
+        return Pair(shift - cutoff * w, shift + cutoff * w)
+    }
+
+
+    companion object {
+
+        private val list = arrayOf("w", "shift")
+    }
+}
@@ -18,6 +18,7 @@ import hep.dataforge.stat.parametric.AbstractParametricFunction
 import hep.dataforge.stat.parametric.ParametricBiFunction
 import hep.dataforge.values.ValueType.BOOLEAN
 import hep.dataforge.values.Values
+import inr.numass.getFSS
 import inr.numass.models.FSS
 import inr.numass.utils.NumassIntegrator
