commit c86891a8c8 (parent 0d21135c59)
[no commit message]
@@ -18,27 +18,14 @@ File dataDir = new File("D:\\Work\\Numass\\data\\2016_04\\T2_data\\Fill_2_2\\set
 if(!dataDir.exists()){
 println "dataDir directory does not exist"
 }
-int lowerChannel = 400
-int upperChannel = 1700
 
-Meta config = new GrindMetaBuilder().config(lowerChannel: lowerChannel, upperChannel: upperChannel)
-println config
+Meta config = new GrindMetaBuilder().config(lowerChannel: 400, upperChannel: 1700)
+//println config
 NumassData data = NumassDataLoader.fromLocalDir(null, dataDir)
 Map<String, NumassData> res = new PileupSimulationAction().simpleRun(data,config)
 
 def keys = res.keySet();
 
-//printing count rate in window
-print "U\t"
-print "Length\t"
-println keys.join("\t")
-
-for(int i = 0; i < data.getNMPoints().size();i++){
-print "${data.getNMPoints().get(i).getUset()}\t"
-print "${data.getNMPoints().get(i).getLength()}\t"
-println keys.collect{res[it].getNMPoints().get(i).getCountInWindow(500,1700)}.join("\t")
-}
-
 //print spectra for selected point
 double u = 15000d;
 
@@ -55,3 +42,14 @@ points.first().keySet().each{
 }
 
 
+//printing count rate in window
+print "U\tLength\t"
+print keys.collect{it+"[total]"}.join("\t") + "\t"
+println keys.join("\t")
+
+for(int i = 0; i < data.getNMPoints().size();i++){
+print "${data.getNMPoints().get(i).getUset()}\t"
+print "${data.getNMPoints().get(i).getLength()}\t"
+print keys.collect{res[it].getNMPoints().get(i).getEventsCount()}.join("\t") + "\t"
+println keys.collect{res[it].getNMPoints().get(i).getCountInWindow(500,1700)}.join("\t")
+}
@@ -46,7 +46,6 @@ import inr.numass.models.EmpiricalLossSpectrum;
 import inr.numass.models.ExperimentalVariableLossSpectrum;
 import inr.numass.models.GaussSourceSpectrum;
 import inr.numass.models.GunSpectrum;
-import inr.numass.models.LossCalculator;
 import inr.numass.models.ModularSpectrum;
 import inr.numass.models.NBkgSpectrum;
 import inr.numass.models.RangedNamedSetSpectrum;
@@ -221,10 +220,10 @@ public class NumassPlugin extends BasicPlugin {
 //Intercept = 4.95745, B1 = -0.36879, B2 = 0.00827
 //sp.setTrappingFunction((Ei,Ef)->LossCalculator.getTrapFunction().value(Ei, Ef)*(4.95745-0.36879*Ei+0.00827*Ei*Ei));
 sp.setTrappingFunction((Ei, Ef) -> {
-return 4.1e-5 * FastMath.exp(-(Ei - Ef) / 600d) + 7.2e-3 * (0.05349 - 4.36375E-6 * (Ei) + 1.09875E-10 * Ei * Ei);
+return 7.12e-5 * FastMath.exp(-(Ei - Ef) / 350d) + 0.1564 * (0.00123-4.2e-8*Ei);
 });
 context.getReport().report("Using folowing trapping formula: {}",
-"4e-5 * FastMath.exp(-(Ei - Ef) / 550d) + 7.2e-3 * (0.05349 - 4.36375E-6 * (Ei) + 1.09875E-10 * Ei**2)");
+"7.12e-5 * FastMath.exp(-(Ei - Ef) / 350d) + 0.1564 * (0.00123-4.2e-8*Ei)");
 NBkgSpectrum spectrum = new NBkgSpectrum(sp);
 
 return new XYModel(spectrum, getAdapter(an));
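The hunk above swaps one empirical trapping correction for another. For readers who want to check or plot the new factor outside the plugin, a minimal standalone sketch of the same expression follows; the class name, the eV interpretation of the energies and the grid in main are illustrative assumptions, not part of the commit.

import org.apache.commons.math3.util.FastMath;

// Standalone sketch (not part of the commit): the new trapping factor from the
// hunk above, lifted into a plain static method so it can be tabulated on its own.
public final class TrappingFactorSketch {

    // Ei: incident energy, Ef: energy after scattering (treating both as eV is an assumption)
    static double trapFactor(double Ei, double Ef) {
        return 7.12e-5 * FastMath.exp(-(Ei - Ef) / 350d) + 0.1564 * (0.00123 - 4.2e-8 * Ei);
    }

    public static void main(String[] args) {
        // coarse grid just to see the magnitude of the correction
        for (double Ei = 14000; Ei <= 18500; Ei += 1500) {
            System.out.printf("Ei=%.0f  factor=%.3e%n", Ei, trapFactor(Ei, Ei - 500));
        }
    }
}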
@@ -31,25 +31,35 @@ public class PileupSimulationAction extends OneToOneAction<NumassData, Map<Strin
 
 @Override
 protected Map<String, NumassData> execute(Context context, Reportable log, String name, Laminate inputMeta, NumassData input) {
-int lowerChannel = inputMeta.getInt("lowerChannel", 0);
-int upperChannel = inputMeta.getInt("upperChannel", RawNMPoint.MAX_CHANEL);
+int lowerChannel = inputMeta.getInt("lowerChannel", 1);
+int upperChannel = inputMeta.getInt("upperChannel", RawNMPoint.MAX_CHANEL-1);
 
 List<NMPoint> generated = new ArrayList<>();
 List<NMPoint> registered = new ArrayList<>();
 List<NMPoint> firstIteration = new ArrayList<>();
+List<NMPoint> secondIteration = new ArrayList<>();
 List<NMPoint> pileup = new ArrayList<>();
 
-double crScale = inputMeta.getDouble("crScale",1d);
+double scale = inputMeta.getDouble("scale", 1d);
 
 input.getNMPoints().forEach(point -> {
-PileUpSimulator simulator = new PileUpSimulator(crScale * point.getCountRate(lowerChannel, upperChannel, 0), point.getLength())
-.withGenerator(point, lowerChannel, upperChannel)
+double length = point.getLength() * scale;
+double cr = point.getCountRate(lowerChannel, upperChannel, 6.4e-6);
+
+PileUpSimulator simulator = new PileUpSimulator(cr, length)
+.withGenerator(point, null, lowerChannel, upperChannel)
 .generate();
 
 //second iteration to exclude pileup overlap
 NMPoint pileupPoint = simulator.pileup();
 firstIteration.add(simulator.registered());
-simulator = new PileUpSimulator(crScale * point.getCountRate(lowerChannel, upperChannel, 0), point.getLength())
+simulator = new PileUpSimulator(cr, length)
+.withGenerator(point, pileupPoint, lowerChannel, upperChannel)
+.generate();
+
+pileupPoint = simulator.pileup();
+secondIteration.add(simulator.registered());
+simulator = new PileUpSimulator(cr, length)
 .withGenerator(point, pileupPoint, lowerChannel, upperChannel)
 .generate();
 
@@ -62,6 +72,7 @@ public class PileupSimulationAction extends OneToOneAction<NumassData, Map<Strin
 res.put("generated", new SimulatedPoint("generated", generated));
 res.put("registered", new SimulatedPoint("registered", registered));
 res.put("firstIteration", new SimulatedPoint("firstIteration", firstIteration));
+res.put("secondIteration", new SimulatedPoint("secondIteration", secondIteration));
 res.put("pileup", new SimulatedPoint("pileup", pileup));
 return res;
 }
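Taken together, the hunks above turn the action into an iterative pileup estimate: a first pass runs with no reference spectrum, its pileup output is fed back as the reference for the next pass, and each pass stores its registered spectrum under its own key ("firstIteration", "secondIteration"). A condensed sketch of one feedback step, using only the calls visible in the diff; project imports are omitted, so treat it as an illustration rather than a drop-in method.

// Sketch: one generate -> pileup -> regenerate-with-reference step, as in execute() above.
NMPoint simulateWithPileupFeedback(NMPoint point, double cr, double length, int lower, int upper) {
    PileUpSimulator simulator = new PileUpSimulator(cr, length)
            .withGenerator(point, null, lower, upper)   // first pass: no reference spectrum
            .generate();
    NMPoint pileupEstimate = simulator.pileup();        // estimated pileup spectrum

    simulator = new PileUpSimulator(cr, length)
            .withGenerator(point, pileupEstimate, lower, upper) // second pass: exclude estimated pileup
            .generate();
    return simulator.registered();
}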
@@ -40,6 +40,7 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.function.Function;
 import java.util.stream.Collectors;
 import org.apache.commons.math3.analysis.ParametricUnivariateFunction;
 import org.apache.commons.math3.exception.DimensionMismatchException;
@@ -59,9 +60,10 @@ import org.apache.commons.math3.fitting.WeightedObservedPoint;
 info = "Enables calculation of detector threshold underflow using exponential shape of energy spectrum tail. "
 + "Not recomended to use with floating window.")
 @ValueDef(name = "underflow.upperBorder", type = "NUMBER", def = "800", info = "Upper chanel for underflow calculation.")
-@ValueDef(name = "underflow.thresholdU", type = "NUMBER", def = "17000", info = "The maximum U for undeflow calculation")
-@ValueDef(name = "underflow.correctionFunction",
-info = "An expression to correct coun tumber depending on potential U, count rate CR and point length T")
+@ValueDef(name = "underflow.threshold", type = "NUMBER", def = "17000", info = "The maximum U for undeflow calculation")
+@ValueDef(name = "underflow.function", info = "An expression for underflow correction above threshold")
+@ValueDef(name = "correction",
+info = "An expression to correct count tumber depending on potential ${U}, point length ${T} and point itself as ${point}")
 public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
 
 public static String[] parnames = {"Uset", "Uread", "Length", "Total", "Window", "Corr", "CR", "CRerr", "Timestamp"};
@@ -79,9 +81,14 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
 
 int upper = meta.getInt("upperWindow", RawNMPoint.MAX_CHANEL - 1);
 
-double deadTime = meta.getDouble("deadTime", 0);
-// double bkg = source.meta().getDouble("background", this.meta().getDouble("background", 0));
+Function<NMPoint, Double> deadTimeFunction;
+if (meta.hasValue("deadTime")) {
+deadTimeFunction = point -> evaluate(point, meta.getString("deadTime"));
+} else {
+deadTimeFunction = point -> 0.0;
+}
 
+// double bkg = source.meta().getDouble("background", this.meta().getDouble("background", 0));
 List<DataPoint> dataList = new ArrayList<>();
 for (NMPoint point : dataFile.getNMPoints()) {
 
@@ -96,11 +103,11 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
 long wind = point.getCountInWindow(a, b);
 
 // count rate after all corrections
-double cr = point.getCountRate(a, b, deadTime);
+double cr = point.getCountRate(a, b, deadTimeFunction.apply(point));
 // count rate error after all corrections
-double crErr = point.getCountRateErr(a, b, deadTime);
+double crErr = point.getCountRateErr(a, b, deadTimeFunction.apply(point));
 
-double correctionFactor = getUnderflowCorrection(log, point, meta, cr);
+double correctionFactor = correction(log, point, meta);
 
 cr = cr * correctionFactor;
 crErr = crErr * correctionFactor;
@@ -136,6 +143,22 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
 return data;
 }
 
+/**
+* Evaluate groovy expression to number
+*
+* @param point
+* @param expression
+* @param countRate
+* @return
+*/
+private double evaluate(NMPoint point, String expression) {
+Map<String, Object> exprParams = new HashMap<>();
+exprParams.put("T", point.getLength());
+exprParams.put("U", point.getUread());
+exprParams.put("point", point);
+return ExpressionUtils.evaluate(expression, exprParams);
+}
+
 /**
 * The factor to correct for count below detector threshold
 *
@@ -145,30 +168,30 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
 * @param countRate precalculated count rate in main window
 * @return
 */
-private double getUnderflowCorrection(Reportable log, NMPoint point, Laminate meta, double countRate) {
-if (meta.hasNode("underflow") || meta.getBoolean("underflow", true)) {
-if (point.getUset() > meta.getDouble("underflow.thresholdU", 17000)) {
-if (meta.hasValue("underflow.correctionFunction")) {
-Map<String, Double> exprParams = new HashMap<>();
-exprParams.put("U", point.getUread());
-exprParams.put("CR", countRate);
-exprParams.put("T", point.getLength());
-return ExpressionUtils.evaluate(meta.getString("underflow.correctionFunction"), exprParams);
+private double correction(Reportable log, NMPoint point, Laminate meta) {
+if (meta.hasValue("correction")) {
+// log.report("Using correction from formula: {}", meta.getString("correction"));
+return evaluate(point, meta.getString("correction"));
+} else if (meta.hasNode("underflow")) {
+if (point.getUset() >= meta.getDouble("underflow.threshold", 17000)) {
+// log.report("Using underflow factor from formula: {}", meta.getString("underflow.function"));
+if (meta.hasValue("underflow.function")) {
+return evaluate(point, meta.getString("underflow.function"));
 } else {
 return 1;
 }
 } else {
 try {
-log.report("Calculating underflow correction coefficient for point {}", point.getUset());
+// log.report("Calculating underflow correction coefficient for point {}", point.getUset());
 int xLow = meta.getInt("underflow.lowerBorder", meta.getInt("lowerWindow"));
 int xHigh = meta.getInt("underflow.upperBorder", 800);
 int binning = meta.getInt("underflow.binning", 20);
 int upper = meta.getInt("upperWindow", RawNMPoint.MAX_CHANEL - 1);
 long norm = point.getCountInWindow(xLow, upper);
 double[] fitRes = getUnderflowExpParameters(point, xLow, xHigh, binning);
-log.report("Underflow interpolation function: {}*exp(c/{})", fitRes[0], fitRes[1]);
+// log.report("Underflow interpolation function: {}*exp(c/{})", fitRes[0], fitRes[1]);
 double correction = fitRes[0] * fitRes[1] * (Math.exp(xLow / fitRes[1]) - 1d) / norm + 1d;
-log.report("Underflow correction factor: {}", correction);
+// log.report("Underflow correction factor: {}", correction);
 return correction;
 } catch (Exception ex) {
 log.reportError("Failed to calculate underflow parameters for point {} with message:", point.getUset(), ex.getMessage());
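In the automatic branch above, the returned factor is consistent with extrapolating the fitted exponential tail a*exp(x/b) below the lower border xLow: the integral of that tail over [0, xLow] is a*b*(exp(xLow/b) - 1), which is divided by the counts observed above xLow and added to 1. A small self-contained check of that arithmetic, with every number invented purely for illustration:

// Toy numbers, not from any real fit: a, b, xLow and norm are placeholders.
public class UnderflowCorrectionSketch {
    public static void main(String[] args) {
        double a = 5;         // amplitude of the fitted tail a*exp(x/b)
        double b = 120;       // slope parameter, in channels
        int xLow = 400;       // lower border of the analysis window
        long norm = 200000;   // counts observed between xLow and the upper window

        // estimated counts hidden below xLow, from integrating a*exp(x/b) over [0, xLow]
        double hidden = a * b * (Math.exp(xLow / b) - 1d);
        double correction = hidden / norm + 1d;   // same formula as in the hunk above
        System.out.println("correction factor = " + correction);
    }
}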
@@ -41,8 +41,10 @@ public class BetaSpectrum extends AbstractParametricFunction implements RangedNa
 
 public BetaSpectrum(File FSSFile) {
 super(list);
+if (FSSFile != null) {
 this.fss = new FSS(FSSFile);
 }
+}
 
 double derivRoot(int n, double E0, double mnu2, double E) throws NotDefinedException {
 //ограничение
@@ -212,9 +212,9 @@ public class ModularSpectrum extends AbstractParametricFunction {
 
 this.caching = caching;
 this.trappingCache.setCachingEnabled(caching);
-for (NamedSpectrumCaching sp : this.cacheList) {
+this.cacheList.stream().forEach((sp) -> {
 sp.setCachingEnabled(caching);
-}
+});
 }
 
 /**
@@ -0,0 +1,41 @@
+/*
+* To change this license header, choose License Headers in Project Properties.
+* To change this template file, choose Tools | Templates
+* and open the template in the editor.
+*/
+package inr.numass.models;
+
+import hep.dataforge.functions.AbstractParametricFunction;
+import hep.dataforge.functions.ParametricFunction;
+import hep.dataforge.values.NamedValueSet;
+
+/**
+*
+* @author Alexander Nozik
+*/
+public class SterileNeutrinoSpectrum extends AbstractParametricFunction {
+
+private static final String[] list = {"X", "trap", "E0", "mnu2", "msterile2", "U2"};
+private ParametricFunction spectrum;
+
+public SterileNeutrinoSpectrum() {
+super(list);
+}
+
+
+@Override
+public double derivValue(String parName, double x, NamedValueSet set) {
+throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
+}
+
+@Override
+public double value(double x, NamedValueSet set) {
+throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
+}
+
+@Override
+public boolean providesDeriv(String name) {
+throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
+}
+
+}
@@ -17,7 +17,7 @@ import org.codehaus.groovy.control.customizers.ImportCustomizer;
 */
 public class ExpressionUtils {
 
-public static Double evaluate(String expression, Map<String, Double> binding) {
+public static Double evaluate(String expression, Map<String, Object> binding) {
 Binding b = new Binding(binding);
 // Add imports for script.
 ImportCustomizer importCustomizer = new ImportCustomizer();
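Widening the binding from Map<String, Double> to Map<String, Object> is what allows PrepareDataAction to pass the whole NMPoint under the key "point" next to plain numbers. A minimal sketch of the same idea built directly on the Groovy classes named in the hunk; the bare GroovyShell wiring is an assumption for illustration (the real ExpressionUtils also configures an ImportCustomizer, which is why the test at the end of this diff can write exp(...) without the Math prefix).

import groovy.lang.Binding;
import groovy.lang.GroovyShell;
import java.util.HashMap;
import java.util.Map;

public class MixedBindingSketch {
    // Evaluate a Groovy expression against a binding that may hold arbitrary objects.
    static double evaluate(String expression, Map<String, Object> binding) {
        Binding b = new Binding(binding);          // same Binding construction as in ExpressionUtils
        GroovyShell shell = new GroovyShell(b);    // assumption: a bare shell, no import customizer
        return ((Number) shell.evaluate(expression)).doubleValue();
    }

    public static void main(String[] args) {
        Map<String, Object> params = new HashMap<>();
        params.put("U", 18000d);                   // numeric value, as in PrepareDataActionTest
        params.put("tag", "set_6");                // non-numeric value, now allowed by the wider signature
        System.out.println(evaluate("1 + 13.265 * Math.exp(-U / 2343.4)", params));
    }
}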
@@ -72,35 +72,43 @@ public class NMEventGenerator {
 for (Map.Entry<Double, Double> entry : spectrum.entrySet()) {
 chanels[i] = entry.getKey();
 values[i] = entry.getValue();
-i++;
 }
 distribution = new EnumeratedRealDistribution(chanels, values);
 }
 
-public void loadSpectrum(NMPoint point, int minChanel, int maxChanel) {
-assert minChanel >= 0;
-assert maxChanel <= RawNMPoint.MAX_CHANEL;
-
+public void loadSpectrum(NMPoint point) {
 double[] chanels = new double[RawNMPoint.MAX_CHANEL];
 double[] values = new double[RawNMPoint.MAX_CHANEL];
 for (int i = 0; i < RawNMPoint.MAX_CHANEL; i++) {
 chanels[i] = i;
 values[i] = point.getCountInChanel(i);
-i++;
 }
 distribution = new EnumeratedRealDistribution(chanels, values);
 }
 
-public void loadSpectrum(NMPoint point, NMPoint reference, int minChanel, int maxChanel) {
-assert minChanel >= 0;
-assert maxChanel <= RawNMPoint.MAX_CHANEL;
-
+public void loadSpectrum(NMPoint point, NMPoint reference) {
 double[] chanels = new double[RawNMPoint.MAX_CHANEL];
 double[] values = new double[RawNMPoint.MAX_CHANEL];
 for (int i = 0; i < RawNMPoint.MAX_CHANEL; i++) {
 chanels[i] = i;
 values[i] = Math.max(0, point.getCountInChanel(i) - reference.getCountInChanel(i));
-i++;
+}
+distribution = new EnumeratedRealDistribution(chanels, values);
+}
+
+/**
+*
+* @param point
+* @param reference
+* @param lower lower channel for spectrum generation
+* @param upper upper channel for spectrum generation
+*/
+public void loadSpectrum(NMPoint point, NMPoint reference, int lower, int upper) {
+double[] chanels = new double[RawNMPoint.MAX_CHANEL];
+double[] values = new double[RawNMPoint.MAX_CHANEL];
+for (int i = lower; i < upper; i++) {
+chanels[i] = i;
+values[i] = Math.max(0, point.getCountInChanel(i) - (reference == null ? 0 : reference.getCountInChanel(i)));
 }
 distribution = new EnumeratedRealDistribution(chanels, values);
 }
@@ -32,15 +32,21 @@ public class PileUpSimulator {
 this.pointLength = length;
 }
 
-public PileUpSimulator withGenerator(NMPoint spectrum, NMPoint reference, int lowerChannel, int upperChannel) {
+public PileUpSimulator withGenerator(NMPoint spectrum, NMPoint reference) {
 this.uSet = spectrum.getUset();
-generator.loadSpectrum(spectrum, reference, lowerChannel, upperChannel);
+generator.loadSpectrum(spectrum, reference);
 return this;
 }
 
-public PileUpSimulator withGenerator(NMPoint spectrum, int lowerChannel, int upperChannel) {
+public PileUpSimulator withGenerator(NMPoint spectrum, NMPoint reference, int from, int to) {
 this.uSet = spectrum.getUset();
-generator.loadSpectrum(spectrum, lowerChannel, upperChannel);
+generator.loadSpectrum(spectrum, reference, from, to);
+return this;
+}
+
+public PileUpSimulator withGenerator(NMPoint spectrum) {
+this.uSet = spectrum.getUset();
+generator.loadSpectrum(spectrum);
 return this;
 }
 
@@ -102,18 +108,22 @@ public class PileUpSimulator {
 
 public synchronized PileUpSimulator generate() {
 NMEvent current = null;
+boolean pileupFlag = false;
 while (true) {
 NMEvent next = generator.nextEvent(current);
 if (next.getTime() > pointLength) {
 break;
 }
 generated.add(next.clone());
+//flag that shows that previous event was pileup
+//not counting double pileups
 if (current != null) {
 double delay = (next.getTime() - current.getTime()) / us;
 if (nextEventRegistered(delay)) {
 //just register new event
 registred.add(next.clone());
-} else if (pileup(delay)) {
+pileupFlag = false;
+} else if (pileup(delay) && !pileupFlag) {
 //pileup event
 short newChannel = pileupChannel(delay, current.getChanel(), next.getChanel());
 NMEvent newEvent = new NMEvent(newChannel, current.getTime());
@@ -121,6 +131,7 @@ public class PileUpSimulator {
 registred.remove(registred.size() - 1);
 registred.add(newEvent.clone());
 pileup.add(newEvent.clone());
+pileupFlag = true;
 } else {
 // second event not registered
 }
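The pileupFlag added in the hunks above keeps an already merged pileup event from being merged a second time when a third event arrives inside the pileup window. A toy, self-contained illustration of that bookkeeping; the delay threshold and the event sequence are invented for the example and are not the simulator's actual values.

public class PileupFlagSketch {
    static final double WINDOW_US = 6.5;   // hypothetical resolution window, microseconds

    public static void main(String[] args) {
        double[] delays = {0.9, 1.1, 25.0, 0.7};   // invented delays between consecutive events
        boolean pileupFlag = false;
        int merged = 0, dropped = 0, separate = 0;

        for (double delay : delays) {
            if (delay > WINDOW_US) {
                separate++;              // far enough apart: a normally registered event
                pileupFlag = false;      // ...which also clears the flag
            } else if (!pileupFlag) {
                merged++;                // first close pair: merged into one pileup event
                pileupFlag = true;       // block a second merge onto the same event
            } else {
                dropped++;               // double pileup: not counted, as in the new condition above
            }
        }
        System.out.println("separate=" + separate + " pileup=" + merged + " ignored=" + dropped);
    }
}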
@@ -23,7 +23,7 @@ public class PrepareDataActionTest {
 
 @Test
 public void testExpression() {
-Map<String, Double> exprParams = new HashMap<>();
+Map<String, Object> exprParams = new HashMap<>();
 exprParams.put("U", 18000d);
 double correctionFactor = ExpressionUtils.evaluate("1 + 13.265 * exp(- U / 2343.4)", exprParams);
 Assert.assertEquals("Testing expression calculation", 1.006125, correctionFactor, 1e-5);