Numass underflow update
Commit 44c42758c6, parent 8861dff73d.
Groovy underflow analysis script:

@@ -14,7 +14,6 @@ import inr.numass.storage.NumassDataUtils
 import inr.numass.storage.NumassStorage
 import inr.numass.utils.UnderflowCorrection
 
-
 File rootDir = new File("D:\\Work\\Numass\\data\\2016_10\\Fill_1")
 
 NumassStorage storage = NumassStorage.buildLocalNumassRoot(rootDir, true);
@@ -49,6 +48,39 @@ data = NumassDataUtils.substractReferencePoint(data, 18600d);
 // }
 //}
 
-Table t = new UnderflowCorrection().fitAllPoints(data, 400, 750, 3100, 20);
+def printPoint(Iterable<NMPoint> data, List us, int binning = 20, normalize = false) {
+    List<NMPoint> points = data.findAll { it.uset in us }.sort { it.uset }
+
+    Map spectra = points.first().getMapWithBinning(binning, normalize).collectEntries { key, value ->
+        [key, [value]]
+    };
+
+    points.eachWithIndex { it, index ->
+        if (index > 0) {
+            print "\t${it.uset}"
+            it.getMapWithBinning(binning, normalize).each { k, v ->
+                spectra[k].add(v)
+            }
+        }
+    }
+
+    println()
+
+    spectra.each { key, value ->
+        print key
+        value.each {
+            print "\t${it}"
+        }
+        println()
+    }
+}
+
+println "\n# spectra\n"
+
+printPoint(data, [16200d, 16400d, 16800d, 17000d, 17200d])
+
+println()
+
+Table t = new UnderflowCorrection().fitAllPoints(data, 400, 700, 3100, 20);
 ColumnedDataWriter.writeDataSet(System.out, t, "underflow parameters")
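The printPoint helper added above dumps the selected points as one tab-separated table: the first column is the binned ADC channel and each further column holds the binned counts of one set voltage (16200 V through 17200 V here); as written, the header row skips the first point's voltage because of the index > 0 guard. Purely for illustration (not part of the commit, with made-up channel and count values), the same column-assembly idea in plain Java:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;
    import java.util.TreeMap;

    public class SpectraColumnsSketch {
        public static void main(String[] args) {
            // two toy binned spectra (channel -> counts); the numbers are placeholders for illustration only
            List<Map<Integer, Double>> points = List.of(
                    Map.of(400, 12.0, 420, 10.5, 440, 9.1),
                    Map.of(400, 11.2, 420, 9.8, 440, 8.7));

            // channel -> one counts value per point, mirroring the Groovy collectEntries/add logic
            Map<Integer, List<Double>> spectra = new TreeMap<>();
            for (Map<Integer, Double> point : points) {
                point.forEach((ch, v) -> spectra.computeIfAbsent(ch, k -> new ArrayList<>()).add(v));
            }

            // one row per channel, one tab-separated column per point
            spectra.forEach((ch, values) -> {
                StringBuilder row = new StringBuilder(ch.toString());
                values.forEach(v -> row.append('\t').append(v));
                System.out.println(row);
            });
        }
    }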
NumassIntegrator.java:

@@ -9,12 +9,11 @@ import hep.dataforge.maths.integration.GaussRuleIntegrator;
 import hep.dataforge.maths.integration.UnivariateIntegrator;
 
 /**
- *
  * @author Alexander Nozik
 */
 public class NumassIntegrator {
 
-    private static double mult = 1.0;
+    private static double mult = 1.0;//for testing purposes
 
     private static UnivariateIntegrator fastInterator;
     private static UnivariateIntegrator defaultIntegrator;
@@ -22,21 +21,21 @@ public class NumassIntegrator {
 
     public static UnivariateIntegrator getFastInterator() {
         if (fastInterator == null) {
-            fastInterator = new GaussRuleIntegrator((int) (mult*100));
+            fastInterator = new GaussRuleIntegrator((int) (mult * 100));
         }
         return fastInterator;
     }
 
     public static UnivariateIntegrator getDefaultIntegrator() {
         if (defaultIntegrator == null) {
-            defaultIntegrator = new GaussRuleIntegrator((int) (mult*300));
+            defaultIntegrator = new GaussRuleIntegrator((int) (mult * 300));
         }
         return defaultIntegrator;
     }
 
     public static UnivariateIntegrator getHighDensityIntegrator() {
         if (highDensityIntegrator == null) {
-            highDensityIntegrator = new GaussRuleIntegrator((int) (mult*500));
+            highDensityIntegrator = new GaussRuleIntegrator((int) (mult * 500));
         }
         return highDensityIntegrator;
     }
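The three lazily initialized integrators differ only in node count (100, 300, and 500 Gauss-rule nodes), with mult acting as a global scale factor for testing. Assuming GaussRuleIntegrator wraps a fixed-order Gauss-Legendre rule, the effect of the node count can be sketched with the Apache Commons Math 3 quadrature API (illustration only, not project code):

    import org.apache.commons.math3.analysis.integration.gauss.GaussIntegrator;
    import org.apache.commons.math3.analysis.integration.gauss.GaussIntegratorFactory;

    public class GaussNodesSketch {
        public static void main(String[] args) {
            GaussIntegratorFactory factory = new GaussIntegratorFactory();
            double exact = 1 - Math.exp(-10); // exact value of the integral of exp(-x) over [0, 10]
            for (int nodes : new int[]{100, 300, 500}) { // same counts as fast/default/high-density
                GaussIntegrator rule = factory.legendre(nodes, 0, 10);
                double value = rule.integrate(x -> Math.exp(-x)); // fixed-order Gauss-Legendre quadrature
                System.out.printf("%d nodes: %.12f (error %.1e)%n", nodes, value, Math.abs(value - exact));
            }
        }
    }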
UnderflowCorrection.java:

@@ -62,9 +62,13 @@ public class UnderflowCorrection {
     public Table fitAllPoints(Iterable<NMPoint> data, int xLow, int xHigh, int upper, int binning) {
         ListTable.Builder builder = new ListTable.Builder("U", "amp", "expConst", "correction");
         for (NMPoint point : data) {
-            double norm = ((double) point.getCountInWindow(xLow, upper))/point.getLength();
+            double norm = ((double) point.getCountInWindow(xLow, upper)) / point.getLength();
             double[] fitRes = getUnderflowExpParameters(point, xLow, xHigh, binning);
-            builder.row(point.getUset(), fitRes[0], fitRes[1], fitRes[0] * fitRes[1] * (Math.exp(xLow / fitRes[1]) - 1d) / norm + 1d);
+            double a = fitRes[0];
+            double sigma = fitRes[1];
+
+            //builder.row(point.getUset(), a, sigma, (a * sigma * (Math.exp(xLow / sigma) - 1) - a*xLow) / norm + 1d);
+            builder.row(point.getUset(), a, sigma, a * sigma * (Math.exp(xLow / sigma) - 1) / norm + 1d);
         }
         return builder.build();
     }
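The "correction" column in the active builder.row call appears to come from integrating the fitted exponential shape a exp(x/sigma) over the underflow region [0, xLow] and normalizing by the measured rate in [xLow, upper]; in LaTeX, with norm as computed above:

    \[
      \int_0^{x_{\mathrm{low}}} a\,e^{x/\sigma}\,dx = a\,\sigma\left(e^{x_{\mathrm{low}}/\sigma} - 1\right),
      \qquad
      \mathrm{correction} = 1 + \frac{a\,\sigma\left(e^{x_{\mathrm{low}}/\sigma} - 1\right)}{\mathrm{norm}},
      \qquad
      \mathrm{norm} = \frac{N(x_{\mathrm{low}},\,\mathrm{upper})}{T_{\mathrm{point}}}.
    \]

The commented-out variant subtracts an extra a xLow term in the numerator, which is exactly what one gets when integrating a (exp(x/sigma) - 1) instead of a exp(x/sigma); compare the commented return in ExponentFunction below.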
@@ -103,15 +107,15 @@ public class UnderflowCorrection {
      * Exponential function for fitting
      */
     private static class ExponentFunction implements ParametricUnivariateFunction {
 
         @Override
         public double value(double x, double... parameters) {
             if (parameters.length != 2) {
                 throw new DimensionMismatchException(parameters.length, 2);
             }
             double a = parameters[0];
-            double x0 = parameters[1];
-            return a * Math.exp(x / x0);
+            double sigma = parameters[1];
+            //return a * (Math.exp(x / sigma) - 1);
+            return a * Math.exp(x / sigma);
         }
 
         @Override
@@ -120,8 +124,11 @@ public class UnderflowCorrection {
                 throw new DimensionMismatchException(parameters.length, 2);
             }
             double a = parameters[0];
-            double x0 = parameters[1];
-            return new double[]{Math.exp(x / x0), -a * x / x0 / x0 * Math.exp(x / x0)};
+            double sigma = parameters[1];
+            return new double[]{
+                Math.exp(x / sigma),
+                -a * x / sigma / sigma * Math.exp(x / sigma)
+            };
         }
 
     }
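For the fit model f(x; a, sigma) = a exp(x/sigma) used in value() above, the two components returned by the gradient are the partial derivatives with respect to the parameters, matching the array built in the code:

    \[
      \frac{\partial f}{\partial a} = e^{x/\sigma},
      \qquad
      \frac{\partial f}{\partial \sigma} = -\frac{a\,x}{\sigma^{2}}\,e^{x/\sigma}.
    \]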
build.gradle (numass-viewer):

@@ -5,7 +5,7 @@ if (!hasProperty('mainClass')) {
 }
 mainClassName = mainClass
 
-version = "0.3.7"
+version = "0.3.7 - SNAPSHOT"
 
 description = "The viewer for numass data"
 
@@ -26,3 +26,9 @@ dependencies {
     compile project(':dataforge-fx')
     compile 'com.jcraft:jsch:0.1.53'
 }
+
+shadowJar {
+    baseName = 'numass-viewer'
+    classifier = null
+    version = null
+}
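Assuming the Gradle Shadow plugin is applied in this build script or a parent one (not visible in this diff), running the shadowJar task should now produce a single fat jar named numass-viewer.jar, since classifier and version are cleared and baseName is fixed.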
NumassLoaderViewComponent.java:

@@ -27,10 +27,7 @@ import hep.dataforge.io.ColumnedDataWriter;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.meta.MetaBuilder;
 import hep.dataforge.plots.XYPlotFrame;
-import hep.dataforge.plots.data.PlotDataUtils;
-import hep.dataforge.plots.data.PlottableData;
-import hep.dataforge.plots.data.TimePlottable;
-import hep.dataforge.plots.data.TimePlottableGroup;
+import hep.dataforge.plots.data.*;
 import hep.dataforge.plots.fx.FXPlotFrame;
 import hep.dataforge.plots.fx.PlotContainer;
 import hep.dataforge.plots.jfreechart.JFreeChartFrame;
@@ -324,7 +321,7 @@ public class NumassLoaderViewComponent extends AnchorPane implements Initializab
      * update detector pane with new data
      */
     private void updateDetectorPane(List<NMPoint> points, int binning, boolean normalize) {
-        FXPlotFrame detectorPlotFrame;
+        FXPlotFrame<XYPlottable> detectorPlotFrame;
         if (detectorPlot.getPlot() == null) {
             Meta frameMeta = new MetaBuilder("frame")
                     .setValue("title", "Detector response plot")