commit ec76341522
parent 091dfd8f8d
[no commit message]
@ -27,7 +27,7 @@ import hep.dataforge.likelihood.BayesianManager
import inr.numass.data.SpectrumDataAdapter
import inr.numass.models.BetaSpectrum;
import inr.numass.models.ModularSpectrum;
import inr.numass.models.ModularTritiumSpectrum

import inr.numass.models.NBkgSpectrum;
import inr.numass.models.RangedNamedSetSpectrum;
import inr.numass.models.ResolutionFunction
@ -52,7 +52,7 @@ File fssfile = new File("c:\\Users\\Darksnake\\Dropbox\\PlayGround\\FS.txt");
BivariateFunction resolution = new ResolutionFunction(2.28e-4);
//resolution.setTailFunction(ResolutionFunction.getRealTail())

ModularTritiumSpectrum sp = new ModularTritiumSpectrum(resolution, 18395d, 18580d, fssfile);
ModularSpectrum sp = new ModularSpectrum(new BetaSpectrum(fssfile), resolution, 18395d, 18580d);
sp.setCaching(false);
//RangedNamedSetSpectrum beta = new BetaSpectrum(fssfile);
//ModularSpectrum sp = new ModularSpectrum(beta, 2.28e-4, 18395d, 18580d);
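Editor's note: the change repeated throughout this commit is the same migration pattern, so a minimal Groovy sketch is collected here. The removed ModularTritiumSpectrum(resolution, elow, endpoint, fssFile) constructor becomes a ModularSpectrum that receives the bare beta spectrum explicitly; when the old code passed null for the FSS file, the new code uses the no-argument BetaSpectrum(). Paths and numeric values are taken from the hunks in this diff and are illustrative only.

import inr.numass.models.BetaSpectrum
import inr.numass.models.ModularSpectrum
import inr.numass.models.ResolutionFunction

def fssfile = new File("c:\\Users\\Darksnake\\Dropbox\\PlayGround\\FS.txt")
def resolution = new ResolutionFunction(2.28e-4)

// with an FSS file (old code passed it as the last constructor argument)
def withFss = new ModularSpectrum(new BetaSpectrum(fssfile), resolution, 18395d, 18580d)
// without an FSS file (old code passed null)
def withoutFss = new ModularSpectrum(new BetaSpectrum(), resolution, 18395d, 18580d)
withFss.setCaching(false)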
@ -13,80 +13,81 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package inr.numass.scripts;

import hep.dataforge.context.GlobalContext;
import static hep.dataforge.context.GlobalContext.out;
import hep.dataforge.tables.ListTable;
import hep.dataforge.datafitter.FitManager;
import hep.dataforge.datafitter.FitState;
import hep.dataforge.datafitter.FitTask;
import hep.dataforge.datafitter.ParamSet;
import hep.dataforge.datafitter.models.XYModel;
import hep.dataforge.exceptions.NamingException;
import inr.numass.data.SpectrumDataAdapter;
import inr.numass.data.SpectrumGenerator;
import inr.numass.models.ModularTritiumSpectrum;
import inr.numass.models.NBkgSpectrum;
import inr.numass.utils.DataModelUtils;
import java.io.FileNotFoundException;
import java.util.Locale;
import static java.util.Locale.setDefault;

/**
 *
 * @author Darksnake
 */

setDefault(Locale.US);
GlobalContext global = GlobalContext.instance();
// global.loadModule(new MINUITModule());

FitManager fm = new FitManager();

ModularTritiumSpectrum beta = new ModularTritiumSpectrum(9e-5, 14390d, 19001d, null);
beta.setCaching(false);

NBkgSpectrum spectrum = new NBkgSpectrum(beta);
XYModel model = new XYModel("tritium", spectrum, new SpectrumDataAdapter());

ParamSet allPars = new ParamSet();

allPars.setParValue("N", 3e5);
//a value of 6e-6 corresponds to a total intensity of 6e7 decays per second
//The problem was an overflow of the event counter in the generator. Replaced it with a long. It might be worth using a floating-point number there instead.
allPars.setParError("N", 6);
allPars.setParDomain("N", 0d, Double.POSITIVE_INFINITY);
allPars.setParValue("bkg", 2d);
allPars.setParError("bkg", 0.03);
allPars.setParValue("E0", 18575.0);
allPars.setParError("E0", 2);
allPars.setParValue("mnu2", 0d);
allPars.setParError("mnu2", 1d);
allPars.setParValue("msterile2", 1000 * 1000);
allPars.setParValue("U2", 0);
allPars.setParError("U2", 1e-4);
allPars.setParDomain("U2", -1d, 1d);
allPars.setParValue("X", 0);
allPars.setParError("X", 0.01);
allPars.setParDomain("X", 0d, Double.POSITIVE_INFINITY);
allPars.setParValue("trap", 0);
allPars.setParError("trap", 0.01d);
allPars.setParDomain("trap", 0d, Double.POSITIVE_INFINITY);

// PrintNamed.printSpectrum(GlobalContext.out(), spectrum, allPars, 0.0, 18700.0, 600);
//String fileName = "d:\\PlayGround\\merge\\scans.out";
// String configName = "d:\\PlayGround\\SCAN.CFG";
// ListTable config = OldDataReader.readConfig(configName);
SpectrumGenerator generator = new SpectrumGenerator(model, allPars, 12316);

ListTable data = generator.generateData(DataModelUtils.getUniformSpectrumConfiguration(13500d, 18200, 1e6, 60));

// data = data.filter("X", Value.of(15510.0), Value.of(18610.0));
// allPars.setParValue("X", 0.4);
FitState state = fm.buildState(data, model, allPars);

FitState res = fm.runTask(state, "QOW", FitTask.TASK_RUN, "N", "bkg", "E0", "U2");

res.print(out());

package inr.numass.scripts;

import hep.dataforge.context.GlobalContext;
import static hep.dataforge.context.GlobalContext.out;
import hep.dataforge.tables.ListTable;
import hep.dataforge.datafitter.FitManager;
import hep.dataforge.datafitter.FitState;
import hep.dataforge.datafitter.FitTask;
import hep.dataforge.datafitter.ParamSet;
import hep.dataforge.datafitter.models.XYModel;
import hep.dataforge.exceptions.NamingException;
import inr.numass.data.SpectrumDataAdapter;
import inr.numass.data.SpectrumGenerator;
import inr.numass.models.BetaSpectrum
import inr.numass.models.ModularSpectrum;
import inr.numass.models.NBkgSpectrum;
import inr.numass.utils.DataModelUtils;
import java.io.FileNotFoundException;
import java.util.Locale;
import static java.util.Locale.setDefault;

/**
 *
 * @author Darksnake
 */

setDefault(Locale.US);
GlobalContext global = GlobalContext.instance();
// global.loadModule(new MINUITModule());

FitManager fm = new FitManager();

ModularSpectrum beta = new ModularSpectrum(new BetaSpectrum(), 9e-5, 14390d, 19001d);
beta.setCaching(false);

NBkgSpectrum spectrum = new NBkgSpectrum(beta);
XYModel model = new XYModel("tritium", spectrum, new SpectrumDataAdapter());

ParamSet allPars = new ParamSet();

allPars.setParValue("N", 3e5);
//a value of 6e-6 corresponds to a total intensity of 6e7 decays per second
//The problem was an overflow of the event counter in the generator. Replaced it with a long. It might be worth using a floating-point number there instead.
allPars.setParError("N", 6);
allPars.setParDomain("N", 0d, Double.POSITIVE_INFINITY);
allPars.setParValue("bkg", 2d);
allPars.setParError("bkg", 0.03);
allPars.setParValue("E0", 18575.0);
allPars.setParError("E0", 2);
allPars.setParValue("mnu2", 0d);
allPars.setParError("mnu2", 1d);
allPars.setParValue("msterile2", 1000 * 1000);
allPars.setParValue("U2", 0);
allPars.setParError("U2", 1e-4);
allPars.setParDomain("U2", -1d, 1d);
allPars.setParValue("X", 0);
allPars.setParError("X", 0.01);
allPars.setParDomain("X", 0d, Double.POSITIVE_INFINITY);
allPars.setParValue("trap", 0);
allPars.setParError("trap", 0.01d);
allPars.setParDomain("trap", 0d, Double.POSITIVE_INFINITY);

// PrintNamed.printSpectrum(GlobalContext.out(), spectrum, allPars, 0.0, 18700.0, 600);
//String fileName = "d:\\PlayGround\\merge\\scans.out";
// String configName = "d:\\PlayGround\\SCAN.CFG";
// ListTable config = OldDataReader.readConfig(configName);
SpectrumGenerator generator = new SpectrumGenerator(model, allPars, 12316);

ListTable data = generator.generateData(DataModelUtils.getUniformSpectrumConfiguration(13500d, 18200, 1e6, 60));

// data = data.filter("X", Value.of(15510.0), Value.of(18610.0));
// allPars.setParValue("X", 0.4);
FitState state = fm.buildState(data, model, allPars);

FitState res = fm.runTask(state, "QOW", FitTask.TASK_RUN, "N", "bkg", "E0", "U2");

res.print(out());

@ -13,100 +13,101 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package inr.numass.scripts;

import hep.dataforge.meta.MetaBuilder;
import hep.dataforge.context.GlobalContext;
import hep.dataforge.datafitter.ParamSet;
import inr.numass.data.SpectrumInformation;
import inr.numass.models.ModularTritiumSpectrum;
import inr.numass.models.NBkgSpectrum;
import inr.numass.models.ResolutionFunction;
import java.util.HashMap;
import java.util.Locale;
import static java.util.Locale.setDefault;
import java.util.Map;
import org.apache.commons.math3.analysis.UnivariateFunction;


setDefault(Locale.US);
GlobalContext global = GlobalContext.instance();
// global.loadModule(new MINUIT());

// FitManager fm = new FitManager("data 2013");
UnivariateFunction reolutionTail = {x ->
    if (x > 1500) {
        return 0.98;
    } else //Intercept = 1.00051, Slope = -1.3552E-5
    {
        return 1.00051 - 1.3552E-5 * x;
    }
};

ModularTritiumSpectrum beta = new ModularTritiumSpectrum(
        new ResolutionFunction(8.3e-5, reolutionTail), 14490d, 19001d, null);
beta.setCaching(false);
NBkgSpectrum spectrum = new NBkgSpectrum(beta);

// XYModel model = new XYModel("tritium", spectrum);
ParamSet allPars = new ParamSet();

allPars.setParValue("N", 3090.1458);
//a value of 6e-6 corresponds to a total intensity of 6e7 decays per second
//The problem was an overflow of the event counter in the generator. Replaced it with a long. It might be worth using a floating-point number there instead.
allPars.setParError("N", 6);
allPars.setParDomain("N", 0d, Double.POSITIVE_INFINITY);
allPars.setParValue("bkg", 2.2110028);
allPars.setParError("bkg", 0.03);
allPars.setParValue("E0", 18580.742);
allPars.setParError("E0", 2);
allPars.setParValue("mnu2", 0d);
allPars.setParError("mnu2", 1d);
allPars.setParValue("msterile2", 1000 * 1000);
allPars.setParValue("U2", 0);
allPars.setParError("U2", 1e-4);
allPars.setParDomain("U2", -1d, 1d);
allPars.setParValue("X", 1.0);
allPars.setParError("X", 0.01);
allPars.setParDomain("X", 0d, Double.POSITIVE_INFINITY);
allPars.setParValue("trap", 1.0d);
allPars.setParError("trap", 0.01d);
allPars.setParDomain("trap", 0d, Double.POSITIVE_INFINITY);

SpectrumInformation sign = new SpectrumInformation(spectrum);

// double Elow = 14000d;
// double Eup = 18600d;
// int numpoints = (int) ((Eup - Elow) / 50);
// double time = 1e6 / numpoints;
// DataSet config = getUniformSpectrumConfiguration(Elow, Eup, time, numpoints);
// NamedMatrix infoMatrix = sign.getInformationMatrix(allPars, config,"U2","E0","N");
//
// PrintNamed.printNamedMatrix(Out.out, infoMatrix);
// NamedMatrix cov = sign.getExpetedCovariance(allPars, config,"U2","E0","N");
//
// PrintWriter out = GlobalContext.out();
//
// printNamedMatrix(out, cov);
//
// cov = sign.getExpetedCovariance(allPars, config,"U2","E0","N","X");
//
// printNamedMatrix(out, cov);
//PlotManager pm = new PlotManager();

Map<String, UnivariateFunction> functions = new HashMap<>();

functions.put("U2", sign.getSignificanceFunction(allPars, "U2", "U2"));
// functions.put("UX", sign.getSignificanceFunction(allPars, "U2", "X"));
functions.put("X", sign.getSignificanceFunction(allPars, "X", "X"));
functions.put("trap", sign.getSignificanceFunction(allPars, "trap", "trap"));
functions.put("E0", sign.getSignificanceFunction(allPars, "E0", "E0"));

MetaBuilder builder = new MetaBuilder("significance");
builder.putValue("from", 14000d);
builder.putValue("to", 18500d);

pm.plotFunction(builder.build(), functions);

// printFuntionSimple(out(), func, 14000d, 18600d, 200);

package inr.numass.scripts;

import hep.dataforge.meta.MetaBuilder;
import hep.dataforge.context.GlobalContext;
import hep.dataforge.datafitter.ParamSet;
import inr.numass.data.SpectrumInformation;
import inr.numass.models.ModularSpectrum;
import inr.numass.models.BetaSpectrum;
import inr.numass.models.NBkgSpectrum;
import inr.numass.models.ResolutionFunction;
import java.util.HashMap;
import java.util.Locale;
import static java.util.Locale.setDefault;
import java.util.Map;
import org.apache.commons.math3.analysis.UnivariateFunction;


setDefault(Locale.US);
GlobalContext global = GlobalContext.instance();
// global.loadModule(new MINUIT());

// FitManager fm = new FitManager("data 2013");
UnivariateFunction reolutionTail = {x ->
    if (x > 1500) {
        return 0.98;
    } else //Intercept = 1.00051, Slope = -1.3552E-5
    {
        return 1.00051 - 1.3552E-5 * x;
    }
};

ModularSpectrum beta = new ModularSpectrum(new BetaSpectrum(),
        new ResolutionFunction(8.3e-5, reolutionTail), 14490d, 19001d);
beta.setCaching(false);
NBkgSpectrum spectrum = new NBkgSpectrum(beta);

// XYModel model = new XYModel("tritium", spectrum);
ParamSet allPars = new ParamSet();

allPars.setParValue("N", 3090.1458);
//a value of 6e-6 corresponds to a total intensity of 6e7 decays per second
//The problem was an overflow of the event counter in the generator. Replaced it with a long. It might be worth using a floating-point number there instead.
allPars.setParError("N", 6);
allPars.setParDomain("N", 0d, Double.POSITIVE_INFINITY);
allPars.setParValue("bkg", 2.2110028);
allPars.setParError("bkg", 0.03);
allPars.setParValue("E0", 18580.742);
allPars.setParError("E0", 2);
allPars.setParValue("mnu2", 0d);
allPars.setParError("mnu2", 1d);
allPars.setParValue("msterile2", 1000 * 1000);
allPars.setParValue("U2", 0);
allPars.setParError("U2", 1e-4);
allPars.setParDomain("U2", -1d, 1d);
allPars.setParValue("X", 1.0);
allPars.setParError("X", 0.01);
allPars.setParDomain("X", 0d, Double.POSITIVE_INFINITY);
allPars.setParValue("trap", 1.0d);
allPars.setParError("trap", 0.01d);
allPars.setParDomain("trap", 0d, Double.POSITIVE_INFINITY);

SpectrumInformation sign = new SpectrumInformation(spectrum);

// double Elow = 14000d;
// double Eup = 18600d;
// int numpoints = (int) ((Eup - Elow) / 50);
// double time = 1e6 / numpoints;
// DataSet config = getUniformSpectrumConfiguration(Elow, Eup, time, numpoints);
// NamedMatrix infoMatrix = sign.getInformationMatrix(allPars, config,"U2","E0","N");
//
// PrintNamed.printNamedMatrix(Out.out, infoMatrix);
// NamedMatrix cov = sign.getExpetedCovariance(allPars, config,"U2","E0","N");
//
// PrintWriter out = GlobalContext.out();
//
// printNamedMatrix(out, cov);
//
// cov = sign.getExpetedCovariance(allPars, config,"U2","E0","N","X");
//
// printNamedMatrix(out, cov);
//PlotManager pm = new PlotManager();

Map<String, UnivariateFunction> functions = new HashMap<>();

functions.put("U2", sign.getSignificanceFunction(allPars, "U2", "U2"));
// functions.put("UX", sign.getSignificanceFunction(allPars, "U2", "X"));
functions.put("X", sign.getSignificanceFunction(allPars, "X", "X"));
functions.put("trap", sign.getSignificanceFunction(allPars, "trap", "trap"));
functions.put("E0", sign.getSignificanceFunction(allPars, "E0", "E0"));

MetaBuilder builder = new MetaBuilder("significance");
builder.putValue("from", 14000d);
builder.putValue("to", 18500d);

pm.plotFunction(builder.build(), functions);

// printFuntionSimple(out(), func, 14000d, 18600d, 200);

@ -29,7 +29,8 @@ import hep.dataforge.exceptions.NamingException;
import hep.dataforge.exceptions.PackFormatException;
import inr.numass.data.SpectrumDataAdapter;
import inr.numass.data.SpectrumGenerator;
import inr.numass.models.ModularTritiumSpectrum;
import inr.numass.models.BetaSpectrum
import inr.numass.models.ModularSpectrum;
import inr.numass.models.NBkgSpectrum;
import inr.numass.utils.DataModelUtils;
import hep.dataforge.plotfit.PlotFitResultAction;
@ -50,7 +51,7 @@ new MINUITPlugin().startGlobal();

FitManager fm = new FitManager();

ModularTritiumSpectrum beta = new ModularTritiumSpectrum(8.3e-5, 13990d, 18600d, null);
ModularSpectrum beta = new ModularSpectrum(new BetaSpectrum(), 8.3e-5, 13990d, 18600d);
//beta.setCaching(false);

NBkgSpectrum spectrum = new NBkgSpectrum(beta);

@ -29,7 +29,8 @@ import hep.dataforge.exceptions.NamingException;
import hep.dataforge.exceptions.PackFormatException;
import inr.numass.data.SpectrumDataAdapter;
import inr.numass.data.SpectrumGenerator;
import inr.numass.models.ModularTritiumSpectrum;
import inr.numass.models.BetaSpectrum
import inr.numass.models.ModularSpectrum
import inr.numass.models.NBkgSpectrum;
import inr.numass.models.ResolutionFunction
import inr.numass.utils.DataModelUtils;
@ -56,7 +57,7 @@ FitManager fm = new FitManager();
ResolutionFunction resolution = new ResolutionFunction(8.3e-5);
//resolution.setTailFunction(ResolutionFunction.getRealTail());
resolution.setTailFunction(ResolutionFunction.getAngledTail(0.00325));
ModularTritiumSpectrum beta = new ModularTritiumSpectrum(resolution, 18395d, 18580d, null);
ModularSpectrum beta = new ModularSpectrum(new BetaSpectrum(), resolution, 18395d, 18580d);
beta.setCaching(false);

NBkgSpectrum spectrum = new NBkgSpectrum(beta);

@ -29,7 +29,8 @@ import hep.dataforge.exceptions.NamingException;
import hep.dataforge.exceptions.PackFormatException;
import inr.numass.data.SpectrumDataAdapter;
import inr.numass.data.SpectrumGenerator;
import inr.numass.models.ModularTritiumSpectrum;
import inr.numass.models.BetaSpectrum
import inr.numass.models.ModularSpectrum;
import inr.numass.models.NBkgSpectrum;
import inr.numass.models.ResolutionFunction
import inr.numass.utils.DataModelUtils;
@ -55,7 +56,7 @@ FitManager fm = new FitManager();

BivariateFunction resolution = new ResolutionFunction(8.3e-5);

ModularTritiumSpectrum beta = new ModularTritiumSpectrum(resolution, 13490d, 18575d, null);
ModularSpectrum beta = new ModularSpectrum(new BetaSpectrum(), resolution, 13490d, 18575d);
beta.setCaching(false);

NBkgSpectrum spectrum = new NBkgSpectrum(beta);

@ -25,7 +25,8 @@ import hep.dataforge.datafitter.models.XYModel;
import hep.dataforge.likelihood.BayesianManager
import static hep.dataforge.maths.RandomUtils.setSeed;
import inr.numass.data.SpectrumGenerator;
import inr.numass.models.ModularTritiumSpectrum;
import inr.numass.models.BetaSpectrum
import inr.numass.models.ModularSpectrum
import inr.numass.models.NBkgSpectrum;
import static inr.numass.utils.DataModelUtils.getUniformSpectrumConfiguration;
import java.io.File;
@ -40,7 +41,7 @@ setSeed(543982);

// TritiumSpectrum beta = new TritiumSpectrum(2e-4, 13995d, 18580d);
File fssfile = new File("c:\\Users\\Darksnake\\Dropbox\\PlayGround\\FS.txt");
ModularTritiumSpectrum beta = new ModularTritiumSpectrum(8.3e-5, 14400d, 19010d, null);
ModularSpectrum beta = new ModularSpectrum(new BetaSpectrum(),8.3e-5, 14400d, 19010d);
beta.setCaching(false);
NBkgSpectrum spectrum = new NBkgSpectrum(beta);
XYModel model = new XYModel("tritium", spectrum);

@ -36,6 +36,7 @@ import inr.numass.actions.MonitorCorrectAction;
import inr.numass.actions.PrepareDataAction;
import inr.numass.actions.ReadNumassDataAction;
import inr.numass.actions.ReadNumassStorageAction;
import inr.numass.actions.ShowEnergySpectrumAction;
import inr.numass.actions.ShowLossSpectrumAction;
import inr.numass.actions.SlicingAction;
import inr.numass.actions.SummaryAction;
@ -45,6 +46,7 @@ import inr.numass.models.EmpiricalLossSpectrum;
import inr.numass.models.ExperimentalVariableLossSpectrum;
import inr.numass.models.GaussSourceSpectrum;
import inr.numass.models.GunSpectrum;
import inr.numass.models.LossCalculator;
import inr.numass.models.ModularSpectrum;
import inr.numass.models.NBkgSpectrum;
import inr.numass.models.RangedNamedSetSpectrum;
@ -83,6 +85,7 @@ public class NumassPlugin extends BasicPlugin {
        actions.registerAction(ShowLossSpectrumAction.class);
        actions.registerAction(AdjustErrorsAction.class);
        actions.registerAction(ReadNumassStorageAction.class);
        actions.registerAction(ShowEnergySpectrumAction.class);
    }

    @Override
@ -211,6 +214,9 @@ public class NumassPlugin extends BasicPlugin {
        if (!an.getBoolean("caching", false)) {
            sp.setCaching(false);
        }
        //Adding trapping energy dependence
        //Intercept = 4.95745, B1 = -0.36879, B2 = 0.00827
        sp.setTrappingFunction((Ei,Ef)->LossCalculator.getTrapFunction().value(Ei, Ef)*(4.95745-0.36879*Ei+0.00827*Ei*Ei));
        NBkgSpectrum spectrum = new NBkgSpectrum(sp);

        return new XYModel("tritium", spectrum, getAdapter(an));

@ -0,0 +1,130 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package inr.numass.actions;

import hep.dataforge.actions.OneToOneAction;
import hep.dataforge.context.Context;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.io.ColumnedDataWriter;
import hep.dataforge.io.reports.Reportable;
import hep.dataforge.meta.Laminate;
import hep.dataforge.meta.Meta;
import hep.dataforge.meta.MetaBuilder;
import hep.dataforge.plots.PlotsPlugin;
import hep.dataforge.plots.XYPlotFrame;
import hep.dataforge.plots.XYPlottable;
import hep.dataforge.plots.data.PlottableData;
import hep.dataforge.tables.DataPoint;
import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.MapPoint;
import hep.dataforge.tables.Table;
import hep.dataforge.tables.TableFormatBuilder;
import hep.dataforge.tables.XYAdapter;
import hep.dataforge.values.ValueType;
import inr.numass.storage.NMPoint;
import inr.numass.storage.NumassData;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
 *
 * @author Alexander Nozik
 */
@TypedActionDef(inputType = NumassData.class, outputType = Table.class, name = "energySpectrum", info = "Generate output table and optionally plot for detector energy spectra")
public class ShowEnergySpectrumAction extends OneToOneAction<NumassData, Table> {

    @Override
    protected Table execute(Context context, Reportable log, String name, Laminate inputMeta, NumassData input) {
        int binning = inputMeta.getInt("binning", 20);
        boolean normalize = inputMeta.getBoolean("normalize", true);
        List<NMPoint> points = input.getNMPoints();

        if (points.isEmpty()) {
            logger().error("Empty data");
            return null;
        }

        //build header
        List<String> names = new ArrayList<>();
        for (int i = 0; i < points.size(); i++) {
            names.add(String.format("%d: %.2f", i, points.get(i).getUset()));
        }

        LinkedHashMap<String, Map<Double, Double>> valueMap = points.stream()
                .collect(Collectors.toMap(
                        p -> names.get(points.indexOf(p)),
                        p -> p.getMapWithBinning(binning, normalize),
                        (v1, v2) -> v1,
                        () -> new LinkedHashMap<>()
                ));

        Collection<Double> rows = valueMap.values().stream().findAny().get().keySet();

        //Building table format
        TableFormatBuilder formatBuilder = new TableFormatBuilder();
        formatBuilder.addColumn("channel", ValueType.NUMBER);
        names.stream().forEach((columnName) -> {
            formatBuilder.addColumn(columnName, ValueType.NUMBER);
        });

        ListTable.Builder builder = new ListTable.Builder(formatBuilder.build());
        rows.stream().forEachOrdered((Double channel) -> {
            MapPoint.Builder mb = new MapPoint.Builder();
            mb.putValue("channel", channel);
            valueMap.entrySet().forEach((Map.Entry<String, Map<Double, Double>> entry) -> {
                mb.putValue(entry.getKey(), entry.getValue().get(channel));
            });
            builder.addRow(mb.build());
        });

        OutputStream out = buildActionOutput(context, name);
        Table table = builder.build();

        ColumnedDataWriter.writeDataSet(out, table, inputMeta.toString());

        if (inputMeta.hasNode("plot") || inputMeta.getBoolean("plot", false)) {
            XYPlotFrame frame = (XYPlotFrame) PlotsPlugin
                    .buildFrom(context).buildPlotFrame(getName(), name,
                            inputMeta.getNode("plot", Meta.empty()));
            fillDetectorData(valueMap).forEach(frame::add);

        }
        return table;
    }

    private List<XYPlottable> fillDetectorData(LinkedHashMap<String, Map<Double, Double>> map) {
        List<XYPlottable> plottables = new ArrayList<>();
        Meta plottableConfig = new MetaBuilder("plot")
                .setValue("connectionType", "step")
                .setValue("thickness", 2)
                .setValue("showLine", true)
                .setValue("showSymbol", false)
                .setValue("showErrors", false)
                .build();

        int index = 0;
        for (Map.Entry<String, Map<Double, Double>> entry : map.entrySet()) {
            index++;
            String seriesName = String.format("%d: %s", index, entry.getKey());

            String[] nameList = {"x", "y"};
            List<DataPoint> data = entry.getValue().entrySet().stream()
                    .map(e -> new MapPoint(nameList, e.getKey(), e.getValue()))
                    .collect(Collectors.toList());
            PlottableData datum = PlottableData.plot(seriesName, new XYAdapter(), data);
            datum.configure(plottableConfig);
            plottables.add(datum);
        }
        return plottables;

    }

}
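Editor's note: the new action reads only three pieces of configuration in execute(): an integer binning (default 20), a boolean normalize (default true), and an optional plot node or boolean value that switches the plot branch on. A hedged Groovy sketch of such a meta, built with the MetaBuilder calls already used elsewhere in this diff (the surrounding workflow that consumes the meta is not shown here and the node name is an assumption):

import hep.dataforge.meta.MetaBuilder

// keys match the inputMeta lookups in ShowEnergySpectrumAction.execute() above
def energySpectrumMeta = new MetaBuilder("energySpectrum")
        .setValue("binning", 50)      // channel binning; the action defaults to 20
        .setValue("normalize", true)  // per-point normalization; the action defaults to true
        .setValue("plot", true)       // the action also checks getBoolean("plot", false)
        .build()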
@ -228,7 +228,7 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
                new MetaBuilder("plot").setValue("plotTitle", "Ion ratio Distribution for " + name)
        );
//        XYPlotFrame frame = JFreeChartFrame.drawFrame("Ion ratio Distribution for " + name, null);
        frame.add(PlottableData.plot("ionRatio", hist, new XYAdapter("binCenter", "count")));
        frame.add(PlottableData.plot("ionRatio", new XYAdapter("binCenter", "count"), hist));

        return new DescriptiveStatistics(res).getStandardDeviation();
    }

@ -126,7 +126,7 @@ public class LossCalculator {
        return getSingleScatterFunction(exPos, ionPos, exW, ionW, exIonRatio);
    }

    static BivariateFunction getTrapFunction() {
    public static BivariateFunction getTrapFunction() {
        return (double Ei, double Ef) -> {
            double eps = Ei - Ef;
            if (eps > 10) {

@ -35,9 +35,10 @@ public class ModularSpectrum extends AbstractParametricFunction {
    private static final String[] list = {"X", "trap"};
    private LossCalculator calculator;
    List<NamedSpectrumCaching> cacheList;
    NamedSpectrumCaching trapping;
    NamedSpectrumCaching trappingCache;
    BivariateFunction resolution;
    RangedNamedSetSpectrum sourceSpectrum;
    BivariateFunction trappingFunction;
    boolean caching = true;
    double cacheMin;
    double cacheMax;
@ -79,11 +80,17 @@ public class ModularSpectrum extends AbstractParametricFunction {
    public ModularSpectrum(RangedNamedSetSpectrum source, double resA, double cacheMin, double cacheMax) {
        this(source, new ResolutionFunction(resA), cacheMin, cacheMax);
    }


    public ModularSpectrum(RangedNamedSetSpectrum source, double resA) {
        this(source, new ResolutionFunction(resA));
    }
    }

    public void setTrappingFunction(BivariateFunction trappingFunction) {
        this.trappingFunction = trappingFunction;
    }



    /**
     * A separate method is needed in case the beta spectrum (FSS) or the
     * resolution changes on the fly
@ -91,7 +98,8 @@ public class ModularSpectrum extends AbstractParametricFunction {
    private void setupCache() {

        //update the caches for trapping and elastic transmission
        BivariateFunction trapFunc = LossCalculator.getTrapFunction();
        //Use the external trapping function if one was provided
        BivariateFunction trapFunc = trappingFunction != null ? trappingFunction : LossCalculator.getTrapFunction();
        BivariateFunction trapRes = new LossResConvolution(trapFunc, resolution);

        ParametricFunction elasticSpectrum = new TransmissionConvolution(sourceSpectrum, resolution, sourceSpectrum);
@ -105,8 +113,8 @@ public class ModularSpectrum extends AbstractParametricFunction {
        TritiumSpectrumCaching elasticCache = new TritiumSpectrumCaching(elasticSpectrum, cacheMin, cacheMax);
        elasticCache.setCachingEnabled(caching);
        cacheList.add(elasticCache);
        this.trapping = new TritiumSpectrumCaching(trapSpectrum, cacheMin, cacheMax);
        this.trapping.setCachingEnabled(caching);
        this.trappingCache = new TritiumSpectrumCaching(trapSpectrum, cacheMin, cacheMax);
        this.trappingCache.setCachingEnabled(caching);
    }

    /**
@ -150,7 +158,7 @@ public class ModularSpectrum extends AbstractParametricFunction {

                return derivSum;
            case "trap":
                return this.trapping.value(U, set);
                return this.trappingCache.value(U, set);
            default:
                if (sourceSpectrum.names().contains(parName)) {
                    List<Double> probs = calculator.getLossProbabilities(X);
@ -161,7 +169,7 @@ public class ModularSpectrum extends AbstractParametricFunction {
                        sum += probs.get(i) * cacheList.get(i).derivValue(parName, U, set);
                    }

                    sum += this.getTrap(set) * this.trapping.derivValue(parName, U, set);
                    sum += this.getTrap(set) * this.trappingCache.derivValue(parName, U, set);
                    return sum;
                } else {
                    return 0;
@ -184,29 +192,30 @@ public class ModularSpectrum extends AbstractParametricFunction {

    /**
     * Set the boundaries and recalculate cache
     *
     * @param cacheMin
     * @param cacheMax
     * @param cacheMax
     */
    public void setCachingBoundaries(double cacheMin, double cacheMax){
    public void setCachingBoundaries(double cacheMin, double cacheMax) {
        this.cacheMin = cacheMin;
        this.cacheMax = cacheMax;
        setupCache();
    }

    public final void setCaching(boolean caching) {
        if(caching && (cacheMin == Double.NaN || cacheMax == Double.NaN)){
        if (caching && (cacheMin == Double.NaN || cacheMax == Double.NaN)) {
            throw new IllegalStateException("Caching boundaries are not defined");
        }

        this.caching = caching;
        this.trapping.setCachingEnabled(caching);
        this.trappingCache.setCachingEnabled(caching);
        for (NamedSpectrumCaching sp : this.cacheList) {
            sp.setCachingEnabled(caching);
        }
    }

    public void setSuppressWarnings(boolean suppress) {
        this.trapping.setSuppressWarnings(suppress);
        this.trappingCache.setSuppressWarnings(suppress);
        for (NamedSpectrumCaching sp : this.cacheList) {
            sp.setSuppressWarnings(suppress);

@ -228,7 +237,7 @@ public class ModularSpectrum extends AbstractParametricFunction {
            res += probs.get(i) * cacheList.get(i).value(U, set);
        }

        res += this.getTrap(set) * this.trapping.value(U, set);
        res += this.getTrap(set) * this.trappingCache.value(U, set);
        return res;
    }
}
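Editor's note: since setupCache() now consults the trappingFunction field, a custom trapping dependence should be installed before the caches are (re)built. A hedged Groovy sketch of the call order, mirroring the NumassPlugin usage earlier in this diff; the constructor arguments are illustrative values taken from the scripts above, not a prescription:

import inr.numass.models.BetaSpectrum
import inr.numass.models.LossCalculator
import inr.numass.models.ModularSpectrum
import org.apache.commons.math3.analysis.BivariateFunction

// quadratic energy dependence from the NumassPlugin hunk, applied on top of the base trap function
def trapWithEnergyDependence = { double Ei, double Ef ->
    LossCalculator.getTrapFunction().value(Ei, Ef) * (4.95745 - 0.36879 * Ei + 0.00827 * Ei * Ei)
} as BivariateFunction

def sp = new ModularSpectrum(new BetaSpectrum(), 8.3e-5, 14390d, 19001d)
sp.setTrappingFunction(trapWithEnergyDependence)  // install before the caches are rebuilt
sp.setCachingBoundaries(14390d, 19001d)           // triggers setupCache(), which now picks it up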
@ -1,216 +0,0 @@
/*
 * Copyright 2015 Alexander Nozik.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package inr.numass.models;

import hep.dataforge.functions.AbstractParametricFunction;
import hep.dataforge.functions.ParametricFunction;
import hep.dataforge.maths.NamedDoubleSet;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.math3.analysis.BivariateFunction;
import org.slf4j.LoggerFactory;

/**
 * Modular tritium spectrum with separate calculation for different transmission
 * components
 *
 * @author Darksnake
 */
public class ModularTritiumSpectrum extends AbstractParametricFunction {

    private static final String[] list = {"U2", "E0", "mnu2", "msterile2", "X", "trap"};
    ParametricFunction bareBeta;
    boolean caching;
    private LossCalculator calculator;
    double elow;
    double endpoint;
    File fssfile;
    BivariateFunction resolution;
    List<NamedSpectrumCaching> scatterList;
    NamedSpectrumCaching trapping;

//    NamedSpectrumCaching elastic;
//    NamedSpectrumCaching inelastic;
//    NamedSpectrumCaching inelastic2;
    /**
     *
     * @param resolution
     * @param elow - lower caching boundary. Should have a small margin relative
     * to the data
     * @param endpoint - upper caching boundary, may be taken without a margin.
     * @param fssFile
     */
    public ModularTritiumSpectrum(BivariateFunction resolution, double elow, double endpoint, File fssFile) {
        super(list);
        assert (endpoint > elow);
        this.elow = elow;
        this.endpoint = endpoint;
        this.fssfile = fssFile;
        this.resolution = resolution;
        this.calculator = LossCalculator.instance();
        setupCache();
    }

    /**
     *
     * @param resA - relative resolution width
     * @param elow - lower caching boundary. Should have a small margin relative
     * to the data
     * @param endpoint - upper caching boundary, may be taken without a margin.
     * @param fssFile
     */
    public ModularTritiumSpectrum(double resA, double elow, double endpoint, File fssFile) {
        this(new ResolutionFunction(resA), elow, endpoint, fssFile);
    }

    @Override
    public double derivValue(String parName, double U, NamedDoubleSet set) {
        if (U >= endpoint) {
            return 0;
        }
        double X = this.getX(set);
        switch (parName) {
            case "U2":
            case "E0":
            case "mnu2":
            case "msterile2":
                List<Double> probs = calculator.getLossProbabilities(X);
                updateScatterCache(probs.size() - 1);
                double sum = 0;

                for (int i = 0; i < probs.size(); i++) {
                    sum += probs.get(i) * scatterList.get(i).derivValue(parName, U, set);
                }

                return sum + this.getTrap(set) * this.trapping.derivValue(parName, U, set);
            case "X":
                List<Double> probDerivs = calculator.getLossProbDerivs(X);
                updateScatterCache(probDerivs.size() - 1);
                double derivSum = 0;

                for (int i = 0; i < probDerivs.size(); i++) {
                    derivSum += probDerivs.get(i) * scatterList.get(i).value(U, set);
                }

                return derivSum;

//                return (X / 3 - 0.5) * this.elastic.value(x, set)
//                        + (0.5 - 2 * X / 3) * this.inelastic.value(x, set)
//                        + (X / 3) * this.inelastic2.value(x, set);
            case "trap":
                return this.trapping.value(U, set);
            default:
                return 0;
        }
    }

    private double getTrap(NamedDoubleSet set) {
        return set.getValue("trap");
    }

    private double getX(NamedDoubleSet set) {
        return set.getValue("X");
    }

    @Override
    public boolean providesDeriv(String name) {
        return true;
    }

    public void setCaching(boolean caching) {
        this.caching = caching;
        this.trapping.setCachingEnabled(caching);
        for (NamedSpectrumCaching sp : this.scatterList) {
            sp.setCachingEnabled(caching);
        }
    }

    public void setSuppressWarnings(boolean suppress) {
        this.trapping.setSuppressWarnings(suppress);
        for (NamedSpectrumCaching sp : this.scatterList) {
            sp.setSuppressWarnings(suppress);

        }
    }

    /**
     * A separate method is needed in case the beta spectrum (FSS) or the
     * resolution changes on the fly
     */
    private void setupCache() {
        if (fssfile == null) {
            bareBeta = new BetaSpectrum();
        } else {
            bareBeta = new BetaSpectrum(fssfile);
        }

        //update the caches for trapping and elastic transmission
        BivariateFunction trapFunc = LossCalculator.getTrapFunction();
        BivariateFunction trapRes = new LossResConvolution(trapFunc, resolution);

        ParametricFunction elasticSpectrum = new TransmissionConvolution(bareBeta, resolution, endpoint);
        ParametricFunction trapSpectrum = new TransmissionConvolution(bareBeta, trapRes, endpoint);
        scatterList = new ArrayList<>();
        //add the zeroth order - elastic scattering
        scatterList.add(new TritiumSpectrumCaching(elasticSpectrum, elow, endpoint));
        this.trapping = new TritiumSpectrumCaching(trapSpectrum, elow, endpoint);
        /**
         * reset the scattering cache
         */

    }

    /**
     * Update the scattering cache if the required order is higher than the one
     * already present
     *
     * @param order
     */
    private void updateScatterCache(int order) {
        if (order >= scatterList.size()) {
            LoggerFactory.getLogger(getClass())
                    .debug("Updating scatter cache up to order of '{}'", order);
            // calls could be saved here by starting from scatterList.size(), but is it worth it?
            for (int i = 1; i < order + 1; i++) {
                BivariateFunction loss = calculator.getLossFunction(i);
                BivariateFunction lossRes = new LossResConvolution(loss, resolution);
                ParametricFunction inelasticSpectrum = new TransmissionConvolution(bareBeta, lossRes, endpoint);
                TritiumSpectrumCaching spCatch = new TritiumSpectrumCaching(inelasticSpectrum, elow, endpoint);
                spCatch.setCachingEnabled(caching);
                //TODO add a check here
                scatterList.add(i, spCatch);
            }
        }
    }

    @Override
    public double value(double x, NamedDoubleSet set) {
        if (x >= endpoint) {
            return 0;
        }
        double X = this.getX(set);

        List<Double> probs = calculator.getLossProbabilities(X);
        updateScatterCache(probs.size() - 1);
        double res = 0;

        for (int i = 0; i < probs.size(); i++) {
            res += probs.get(i) * scatterList.get(i).value(x, set);
        }

        return res + this.getTrap(set) * this.trapping.value(x, set);
    }
}
@ -13,7 +13,7 @@ import javafx.fxml.FXMLLoader;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.stage.Stage;

import javafx.stage.WindowEvent;

/**
 *
@ -30,10 +30,14 @@ public class Workbench extends Application {

        NumassWorkbenchController controller = loader.getController();
        controller.setContextFactory(NumassContext::new);


        primaryStage.setTitle("Numass workbench");
        primaryStage.setScene(scene);
        primaryStage.show();

        scene.getWindow().setOnCloseRequest((WindowEvent event) -> {
            controller.getContext().processManager().getRootProcess().cancel(true);
        });
    }

    /**

@ -19,7 +19,8 @@ import hep.dataforge.context.GlobalContext;
import hep.dataforge.datafitter.MINUITPlugin;
import hep.dataforge.datafitter.ParamSet;
import hep.dataforge.exceptions.NamingException;
import inr.numass.models.ModularTritiumSpectrum;
import inr.numass.models.BetaSpectrum;
import inr.numass.models.ModularSpectrum;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.Locale;
@ -43,7 +44,7 @@ public class NumassSpectrumTest {
        ParamSet allPars = new ParamSet();

        allPars.setParValue("N", 3000);
        //a value of 6e-6 corresponds to a total intensity of 6e7 decays per second
        //a value of 6e-6 corresponds to a total intensity of 6e7 decays per second
        //The problem was an overflow of the event counter in the generator. Replaced it with a long. It might be worth using a floating-point number there instead.
        allPars.setParError("N", 6);
        allPars.setParDomain("N", 0d, Double.POSITIVE_INFINITY);
@ -64,7 +65,7 @@ public class NumassSpectrumTest {
        allPars.setParError("trap", 0.01d);
        allPars.setParDomain("trap", 0d, Double.POSITIVE_INFINITY);

        ModularTritiumSpectrum betaNew = new ModularTritiumSpectrum(1e-4, 14390d, 19001d, new File("d:\\PlayGround\\FS.txt"));
        ModularSpectrum betaNew = new ModularSpectrum(new BetaSpectrum(new File("d:\\PlayGround\\FS.txt")), 1e-4, 14390d, 19001d);
        betaNew.setCaching(false);

        System.out.println(betaNew.value(17000d, allPars));

@ -28,6 +28,7 @@ import hep.dataforge.storage.api.Storage;
import hep.dataforge.storage.loaders.AbstractLoader;
import hep.dataforge.tables.Table;
import hep.dataforge.values.Value;
import static inr.numass.storage.RawNMPoint.MAX_EVENTS_PER_POINT;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
@ -193,6 +194,13 @@ public class NumassDataLoader extends AbstractLoader implements ObjectLoader<Env
        } else {
            pointTime = envelope.meta().getValue("acquisition_time").doubleValue();
        }

        //Check if the point is composite
        boolean segmented = envelope.meta().hasValue("events") && envelope.meta().getValue("events").isList();

        if (!segmented && events.size() > MAX_EVENTS_PER_POINT) {
            pointTime = events.get(events.size() - 1).getTime() - events.get(0).getTime();
        }
        RawNMPoint raw = new RawNMPoint(u, u,
                events,
                pointTime,
@ -105,9 +105,7 @@ public class RawNMPoint implements Cloneable {
        if (Double.isNaN(length)) {
            throw new Error();
        }
        if (events.size() > MAX_EVENTS_PER_POINT) {
            return events.get(events.size() - 1).getTime() - events.get(0).getTime();
        }

        return length;
    }
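Editor's note: the two hunks above move the overflow guard out of RawNMPoint.getLength() and into NumassDataLoader. When a non-segmented point carries more events than MAX_EVENTS_PER_POINT, the stored acquisition time is treated as unreliable and the duration is taken from the first and last event timestamps instead. A minimal Groovy sketch of that rule with simplified, assumed inputs (the real code works on NMEvent objects and envelope meta, not plain doubles):

// storedTime comes from the envelope meta; eventTimes are event timestamps in seconds
double effectivePointTime(double storedTime, List<Double> eventTimes,
                          boolean segmented, int maxEventsPerPoint) {
    if (!segmented && eventTimes.size() > maxEventsPerPoint) {
        // counter overflow suspected: derive the duration from the event timestamps
        return eventTimes.last() - eventTimes.first()
    }
    return storedTime
}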