Numass underflow update

darksnake 2017-07-30 16:42:56 +03:00
parent bc1b79784d
commit 5ea87aae51
15 changed files with 293 additions and 197 deletions

View File

@@ -11,7 +11,8 @@ import inr.numass.data.api.NumassPoint;
 import inr.numass.data.api.NumassSet;
 import java.util.Collection;
-import java.util.Optional;
+import java.util.Map;
+import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import static hep.dataforge.tables.XYAdapter.*;
@@ -56,21 +57,25 @@ public class NumassDataUtils {
                 .addNumber(COUNT_RATE_KEY, Y_VALUE_KEY)
                 .addNumber(COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
                 .build();
+        //indexing table elements
+        Map<Double, Values> t1 = sp1.getRows().collect(Collectors.toMap(row -> row.getDouble(CHANNEL_KEY), row -> row));
+        Map<Double, Values> t2 = sp2.getRows().collect(Collectors.toMap(row -> row.getDouble(CHANNEL_KEY), row -> row));
         ListTable.Builder builder = new ListTable.Builder(format);
-        for (int i = 0; i < sp1.size(); i++) {
-            Values row1 = sp1.getRow(i);
-            Optional<Values> row2 = sp2.getRows().filter(row -> row.getDouble(CHANNEL_KEY) == row1.getDouble(CHANNEL_KEY)).findFirst();
-            double value1 = row1.getDouble(COUNT_RATE_KEY);
-            double value2 = row2.map(it -> it.getDouble(COUNT_RATE_KEY)).orElse(0d);
-            double error1 = row1.getDouble(COUNT_RATE_ERROR_KEY);
-            double error2 = row2.map(it -> it.getDouble(COUNT_RATE_ERROR_KEY)).orElse(0d);
-            double value = value1 - value2;
-            double error = Math.sqrt(error1 * error1 + error2 * error2);
-            builder.row(sp1.get(CHANNEL_KEY, i).intValue(), value, error);
-        }
+        t1.forEach((channel, row1) -> {
+            Values row2 = t2.get(channel);
+            if (row2 == null) {
+                builder.row(row1);
+            } else {
+                double value = Math.max(row1.getDouble(COUNT_RATE_KEY) - row2.getDouble(COUNT_RATE_KEY), 0);
+                double error1 = row1.getDouble(COUNT_RATE_ERROR_KEY);
+                double error2 = row2.getDouble(COUNT_RATE_ERROR_KEY);
+                double error = Math.sqrt(error1 * error1 + error2 * error2);
+                builder.row(channel, value, error);
+            }
+        });
         return builder.build();
     }
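
The rewrite above replaces a quadratic scan (re-filtering sp2 for every row of sp1) with two channel-indexed maps; unmatched channels are passed through unchanged, negative differences are clamped to zero, and independent errors are combined in quadrature. A minimal standalone sketch of the per-channel rule, using hypothetical names (only the arithmetic is taken from the diff):

    /** Hypothetical illustration of the per-channel subtraction in subtractSpectrum. */
    final class SpectrumSubtractionSketch {
        /** Subtract a reference rate: clamp at zero, combine independent errors in quadrature. */
        static double[] subtractChannel(double rate, double rateErr, double refRate, double refErr) {
            double value = Math.max(rate - refRate, 0); // a count rate cannot be negative
            double error = Math.sqrt(rateErr * rateErr + refErr * refErr);
            return new double[]{value, error};
        }

        public static void main(String[] args) {
            double[] r = subtractChannel(10.0, 1.0, 4.0, 0.5);
            System.out.printf("rate = %.3f +/- %.3f%n", r[0], r[1]); // prints rate = 6.000 +/- 1.118
        }
    }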

View File

@@ -64,13 +64,13 @@ public class SpectrumDataAdapter extends XYAdapter {
     }

     public Values buildSpectrumDataPoint(double x, long count, double t) {
-        return new ValueMap(new String[]{nameFor(X_VALUE_KEY), nameFor(Y_VALUE_KEY),
+        return ValueMap.of(new String[]{nameFor(X_VALUE_KEY), nameFor(Y_VALUE_KEY),
                 nameFor(POINT_LENGTH_NAME)},
                 x, count, t);
     }

     public Values buildSpectrumDataPoint(double x, long count, double countErr, double t) {
-        return new ValueMap(new String[]{nameFor(X_VALUE_KEY), nameFor(Y_VALUE_KEY),
+        return ValueMap.of(new String[]{nameFor(X_VALUE_KEY), nameFor(Y_VALUE_KEY),
                 nameFor(Y_ERROR_KEY), nameFor(POINT_LENGTH_NAME)},
                 x, count, countErr, t);
     }
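
This is the first of many hunks in this commit that migrate from the deprecated ValueMap constructor to the ValueMap.of factory. Judging from the call sites in this diff, the factory pairs a name array with values positionally, just as the constructor did, so the change is mechanical. A hedged sketch of the pattern (the signature ValueMap.of(String[] names, Object... values) is inferred from usage here, not from documentation):

    import hep.dataforge.tables.ValueMap;
    import hep.dataforge.values.Values;

    class ValueMapMigrationSketch {
        static Values xyPoint(double x, double y) {
            // before: new ValueMap(new String[]{"x", "y"}, x, y)
            return ValueMap.of(new String[]{"x", "y"}, x, y);
        }
    }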

View File

@@ -30,28 +30,22 @@ public class SimpleAnalyzer extends AbstractAnalyzer {
         double countRateError = Math.sqrt((double) count) / block.getLength().toMillis() * 1000;

         if (block instanceof NumassPoint) {
-            return new ValueMap(NAME_LIST_WITH_HV,
-                    new Object[]{
-                            ((NumassPoint) block).getVoltage(),
-                            block.getLength().toNanos(),
-                            count,
-                            countRate,
-                            countRateError,
-                            new int[]{loChannel, upChannel},
-                            block.getStartTime()
-                    }
-            );
+            return ValueMap.of(NAME_LIST_WITH_HV,
+                    ((NumassPoint) block).getVoltage(),
+                    block.getLength().toNanos(),
+                    count,
+                    countRate,
+                    countRateError,
+                    new int[]{loChannel, upChannel},
+                    block.getStartTime());
         } else {
-            return new ValueMap(NAME_LIST,
-                    new Object[]{
-                            block.getLength().toNanos(),
-                            count,
-                            countRate,
-                            countRateError,
-                            new int[]{loChannel, upChannel},
-                            block.getStartTime()
-                    }
-            );
+            return ValueMap.of(NAME_LIST,
+                    block.getLength().toNanos(),
+                    count,
+                    countRate,
+                    countRateError,
+                    new int[]{loChannel, upChannel},
+                    block.getStartTime());
         }
     }
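
For reference, the rate and error in the first line of this hunk are plain Poisson statistics: for N counts in live time T,

$$ r = \frac{N}{T}, \qquad \sigma_r = \frac{\sqrt{N}}{T}, $$

since the standard deviation of a Poisson count is sqrt(N); the division by toMillis() followed by * 1000 converts the millisecond block length to seconds.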

View File

@@ -44,33 +44,28 @@ public class TimeAnalyzer extends AbstractAnalyzer {
             }
         });

-        double countRate = 1e6 * totalN.get() / (totalT.get()/1000 - t0 * totalN.get()/1000);//1e9 / (totalT.get() / totalN.get() - t0);
+        double countRate = 1e6 * totalN.get() / (totalT.get() / 1000 - t0 * totalN.get() / 1000);//1e9 / (totalT.get() / totalN.get() - t0);
         double countRateError = countRate / Math.sqrt(totalN.get());
         long count = (long) (totalT.get() * (countRate / 1e9));
         double length = totalT.get();

         if (block instanceof NumassPoint) {
-            return new ValueMap(NAME_LIST_WITH_HV,
-                    new Object[]{
-                            ((NumassPoint) block).getVoltage(),
-                            length,
-                            count,
-                            countRate,
-                            countRateError,
-                            new int[]{loChannel, upChannel},
-                            block.getStartTime()
-                    }
-            );
+            return ValueMap.of(NAME_LIST_WITH_HV,
+                    ((NumassPoint) block).getVoltage(),
+                    length,
+                    count,
+                    countRate,
+                    countRateError,
+                    new int[]{loChannel, upChannel},
+                    block.getStartTime());
         } else {
-            return new ValueMap(NAME_LIST,
-                    new Object[]{
-                            length,
-                            count,
-                            countRate,
-                            countRateError,
-                            new int[]{loChannel, upChannel},
-                            block.getStartTime()
-                    }
+            return ValueMap.of(NAME_LIST,
+                    length,
+                    count,
+                    countRate,
+                    countRateError,
+                    new int[]{loChannel, upChannel},
+                    block.getStartTime()
             );
         }
     }
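
The countRate line that this hunk only re-spaces encodes a dead-time correction: every registered event is assumed to blind the acquisition for t0, so the live time shrinks by t0 * N. Writing T for totalT (the summed inter-event time) and N for totalN,

$$ r = \frac{N}{T - t_0 N} = \frac{1}{T/N - t_0}, \qquad \sigma_r = \frac{r}{\sqrt{N}}, $$

which is the commented-out 1e9 / (totalT/totalN - t0) form up to the nanosecond-to-second unit factors (the 1e6 and /1000 in the code; the nanosecond interpretation is inferred from the toNanos() calls elsewhere in this diff, not stated in the excerpt).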

View File

@@ -61,8 +61,8 @@ public interface NumassAnalyzer {
                 int c = row.getInt(CHANNEL_KEY);
                 return c >= binLo && c <= binUp;
             }).forEach(row -> {
-                count.addAndGet(row.getValue(COUNT_KEY).numberValue().longValue());
-                countRate.accumulateAndGet(row.getDouble(COUNT_RATE_KEY), (d1, d2) -> d1 + d2);
+                count.addAndGet(row.getValue(COUNT_KEY, 0).longValue());
+                countRate.accumulateAndGet(row.getDouble(COUNT_RATE_KEY, 0), (d1, d2) -> d1 + d2);
             });
             int bin = Math.min(binSize, upChannel - chan);
             builder.row((double) chan + (double) bin / 2d, count.get(), countRate.get(), bin);
@@ -135,7 +135,7 @@ public interface NumassAnalyzer {
                 .filter(i -> spectrum[i] != null)
                 .mapToObj(i -> {
                     long value = spectrum[i].get();
-                    return new ValueMap(
+                    return ValueMap.of(
                             format.namesAsArray(),
                             i,
                             value,
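
The first hunk above sits inside the binning loop: counts and rates are summed over each channel window and the row is centered at chan + bin/2, with the last bin allowed to be narrower. A freestanding sketch of the same scheme over a plain array (hypothetical helper, not the dataforge implementation):

    /** Hypothetical sketch of the rebinning scheme used by spectrumWithBinning. */
    final class BinningSketch {
        /** Sum counts into windows of width binSize; each row is {center, sum, width}. */
        static double[][] bin(long[] counts, int binSize) {
            int nBins = (counts.length + binSize - 1) / binSize;
            double[][] out = new double[nBins][];
            for (int b = 0; b < nBins; b++) {
                int lo = b * binSize;
                int width = Math.min(binSize, counts.length - lo); // last bin may be narrower
                long sum = 0;
                for (int ch = lo; ch < lo + width; ch++) {
                    sum += counts[ch];
                }
                out[b] = new double[]{lo + width / 2d, sum, width};
            }
            return out;
        }
    }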

View File

@@ -1,123 +0,0 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package inr.numass.scripts
import hep.dataforge.grind.Grind
import hep.dataforge.meta.Meta
import hep.dataforge.storage.commons.StorageUtils
import hep.dataforge.tables.Table
import inr.numass.data.NumassDataUtils
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.NumassAnalyzer
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.SimpleNumassPoint
import inr.numass.data.storage.NumassStorage
import inr.numass.data.storage.NumassStorageFactory
import java.util.stream.Collectors
import static inr.numass.data.api.NumassAnalyzer.CHANNEL_KEY
import static inr.numass.data.api.NumassAnalyzer.COUNT_RATE_KEY
//Defining root directory
//File rootDir = new File("D:\\Work\\Numass\\data\\2016_10\\Fill_1")
//File rootDir = new File("D:\\Work\\Numass\\data\\2016_10\\Fill_2_wide")
//File rootDir = new File("D:\\Work\\Numass\\data\\2017_01\\Fill_2_wide")
File rootDir = new File("D:\\Work\\Numass\\data\\2017_05\\Fill_2")
//creating storage instance
NumassStorage storage = NumassStorageFactory.buildLocal(rootDir);
//Reading points
Map<Double, List<NumassPoint>> allPoints = StorageUtils
.loaderStream(storage)
.filter { it.key.matches("set_.{1,3}") }
.map {
println "loading ${it.key}"
it.value
}.flatMap { it.points }
.collect(Collectors.groupingBy { it.voltage })
Meta analyzerMeta = Grind.buildMeta(t0: 3e4)
NumassAnalyzer analyzer = new TimeAnalyzer()
//creating spectra
Map spectra = allPoints.collectEntries {
def point = new SimpleNumassPoint(it.key, it.value)
println "generating spectrum for ${point.voltage}"
return [(point.voltage): analyzer.getSpectrum(point, analyzerMeta)]
}
def refereceVoltage = 18600d
int binning = 20
//subtracting reference point
if (refereceVoltage) {
def referencePoint = spectra[refereceVoltage]
spectra = spectra.findAll { it.key != refereceVoltage }.collectEntries {
return [(it.key): NumassDataUtils.subtractSpectrum(it.getValue() as Table, referencePoint as Table)]
}
}
//Apply binning
if (binning) {
spectra = spectra.collectEntries {
[(it.key): NumassAnalyzer.spectrumWithBinning(it.value as Table, binning)];
}
}
//printing selected points
def printPoint = { Map<Double, Table> points ->
print "channel"
points.each { print "\t${it.key}" }
println()
def firstPoint = points.values().first()
(0..firstPoint.size()).each { i ->
print firstPoint.get(CHANNEL_KEY, i).intValue()
points.values().each {
print "\t${it.get(COUNT_RATE_KEY, i).doubleValue()}"
}
println()
}
println()
}
println "\n# spectra\n"
//printPoint(data, [16200d, 16400d, 16800d, 17000d, 17200d, 17700d])
printPoint(spectra.findAll { it.key in [16200d, 16400d, 16800d, 17000d, 17200d, 17700d] })
println()
//Table t = new UnderflowCorrection().fitAllPoints(data, 350, 550, 3100, 20);
//ColumnedDataWriter.writeTable(System.out, t, "underflow parameters")
//println "Empty files:"
//Collection<NMPoint> emptySpectra = NumassDataUtils.joinSpectra(
// StorageUtils.loaderStream(storage).filter{ it.key.matches("Empty.*")}.map {
// println it.key
// it.value
// }
//)
//emptySpectra = NumassDataUtils.substractReferencePoint(emptySpectra,18600);
//
//data = data.collect { point ->
// NMPoint ref = emptySpectra.find { it.u == point.u }
// if (ref) {
// println "substracting tritium background for point ${point.u}"
// NumassDataUtils.substractPoint(point, ref)
// } else {
// println "point ${point.u} skipped"
// point
// }
//}

View File

@@ -59,14 +59,14 @@ new GrindShell(ctx).eval {
 //        point.blocks.eachWithIndex { block, index ->
 //            def statPlotPoints = t0.collect {
 //                def result = PointAnalyzer.analyze(block, t0: it, "window.lo": loChannel, "window.up": upChannel)
-//                ValueMap.fromMap("x": it / 1000, "y": result.getDouble("cr"), "y.err": result.getDouble("cr.err"));
+//                ValueMap.ofMap("x": it / 1000, "y": result.getDouble("cr"), "y.err": result.getDouble("cr.err"));
 //            }
 //            plot.plot(name: index, frame: "stat-method", showLine: true, statPlotPoints)
 //        }

         def statPlotPoints = t0.collect {
             def result = PointAnalyzer.analyze(point, t0: it, "window.lo": loChannel, "window.up": upChannel)
-            ValueMap.fromMap("x": it / 1000, "y": result.getDouble("cr"), "y.err": result.getDouble("cr.err"));
+            ValueMap.ofMap("x": it / 1000, "y": result.getDouble("cr"), "y.err": result.getDouble("cr.err"));
         }

         plot.plot(name: "total", frame: "stat-method", showLine: true, statPlotPoints)
@@ -75,7 +75,7 @@ new GrindShell(ctx).eval {
 //            def t1 = it
 //            def t2 = it + delta
 //            def result = PointAnalyzer.count(point, t1, t2, loChannel, upChannel) - (Math.exp(-trueCR * t1) - Math.exp(-trueCR * t2)) * point.length * trueCR
-//            ValueMap.fromMap("x.value": it + delta / 2, "y.value": result);
+//            ValueMap.ofMap("x.value": it + delta / 2, "y.value": result);
 //        }
 //
 //        plot.plot(name: hv, frame: "discrepancy", discrepancyPlotPoints)

View File

@@ -46,14 +46,14 @@ new GrindShell(ctx).eval {
 //        point.blocks.eachWithIndex { block, index ->
 //            def statPlotPoints = t0.collect {
 //                def result = PointAnalyzer.analyze(block, t0: it, "window.lo": loChannel, "window.up": upChannel)
-//                ValueMap.fromMap("x": it / 1000, "y": result.getDouble("cr"), "y.err": result.getDouble("cr.err"));
+//                ValueMap.ofMap("x": it / 1000, "y": result.getDouble("cr"), "y.err": result.getDouble("cr.err"));
 //            }
 //            plot.plot(name: index, frame: "stat-method", showLine: true, statPlotPoints)
 //        }

         def statPlotPoints = t0.collect {
             def result = PointAnalyzer.analyze(point, t0: it, "window.lo": loChannel, "window.up": upChannel)
-            ValueMap.fromMap("x": it / 1000, "y": result.getDouble("cr"), "y.err": result.getDouble("cr.err"));
+            ValueMap.ofMap("x": it / 1000, "y": result.getDouble("cr"), "y.err": result.getDouble("cr.err"));
         }

         plot.plot(name: "total", frame: "stat-method", showLine: true, thickness: 4, statPlotPoints)
@@ -64,7 +64,7 @@ new GrindShell(ctx).eval {
 //            def t1 = it
 //            def t2 = it + delta
 //            def result = PointAnalyzer.count(point, t1, t2, loChannel, upChannel) - (Math.exp(-trueCR * t1) - Math.exp(-trueCR * t2)) * point.length * trueCR
-//            ValueMap.fromMap("x.value": it + delta / 2, "y.value": result);
+//            ValueMap.ofMap("x.value": it + delta / 2, "y.value": result);
 //        }
 //
 //        plot.plot(name: hv, frame: "discrepancy", discrepancyPlotPoints)

View File

@@ -0,0 +1,118 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package inr.numass.scripts.underflow
import hep.dataforge.context.Context
import hep.dataforge.context.Global
import hep.dataforge.grind.Grind
import hep.dataforge.grind.GrindShell
import hep.dataforge.grind.helpers.PlotHelper
import hep.dataforge.io.ColumnedDataWriter
import hep.dataforge.meta.Meta
import hep.dataforge.plots.data.PlottableData
import hep.dataforge.plots.data.PlottableGroup
import hep.dataforge.plots.fx.FXPlotManager
import hep.dataforge.storage.commons.StorageUtils
import hep.dataforge.tables.Table
import hep.dataforge.tables.TableTransform
import hep.dataforge.tables.XYAdapter
import inr.numass.NumassPlugin
import inr.numass.data.NumassDataUtils
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.NumassAnalyzer
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.SimpleNumassPoint
import inr.numass.data.storage.NumassStorage
import inr.numass.data.storage.NumassStorageFactory
import java.util.stream.Collectors
import static inr.numass.data.api.NumassAnalyzer.CHANNEL_KEY
import static inr.numass.data.api.NumassAnalyzer.COUNT_RATE_KEY
Context ctx = Global.instance()
ctx.pluginManager().load(FXPlotManager)
ctx.pluginManager().load(NumassPlugin.class)
new GrindShell(ctx).eval {
//Defining root directory
File dataDirectory = new File("D:\\Work\\Numass\\data\\2017_05\\Fill_2")
//creating storage instance
NumassStorage storage = NumassStorageFactory.buildLocal(dataDirectory);
//Reading points
Map<Double, List<NumassPoint>> allPoints = StorageUtils
.loaderStream(storage)
.filter { it.key.matches("set_.{1,3}") }
.map {
println "loading ${it.key}"
it.value
}.flatMap { it.points }
.collect(Collectors.groupingBy { it.voltage })
Meta analyzerMeta = Grind.buildMeta(t0: 3e4)
NumassAnalyzer analyzer = new TimeAnalyzer()
//creating spectra
Map spectra = allPoints.collectEntries {
def point = new SimpleNumassPoint(it.key, it.value)
println "generating spectrum for ${point.voltage}"
return [(point.voltage): analyzer.getSpectrum(point, analyzerMeta)]
}
def refereceVoltage = 18600d
//subtracting reference point
if (refereceVoltage) {
println "subtracting reference point ${refereceVoltage}"
def referencePoint = spectra[refereceVoltage]
spectra = spectra.findAll { it.key != refereceVoltage }.collectEntries {
return [(it.key): NumassDataUtils.subtractSpectrum(it.getValue() as Table, referencePoint as Table)]
}
}
//Showing selected points
def showPoints = { Map points, int binning = 20, int loChannel = 300, int upChannel = 2000 ->
PlottableGroup<PlottableData> plotGroup = new PlottableGroup<>();
def adapter = new XYAdapter(CHANNEL_KEY, COUNT_RATE_KEY)
points.each {
plotGroup.add(
PlottableData.plot(
it.key as String,
adapter,
NumassAnalyzer.spectrumWithBinning(it.value as Table, binning)
)
)
}
//configuring and plotting
plotGroup.configure(showLine: true, showSymbol: false, showErrors: false, connectionType: "step")
def frame = (plots as PlotHelper).getManager().getPlotFrame("Spectra")
frame.configure("yAxis.type": "log")
frame.addAll(plotGroup)
}
showPoints(spectra.findAll { it.key in [16200d, 16400d, 16800d, 17000d, 17200d, 17700d] })
println()
Table correctionTable = TableTransform.filter(
UnderflowFitter.fitAllPoints(spectra, 450, 700, 3100, 20),
"correction",
0,
2)
ColumnedDataWriter.writeTable(System.out, correctionTable, "underflow parameters")
(plots as PlotHelper).plot(correctionTable, name: "correction", frame: "Correction") {
adapter("x.value":"U", "y.value":"correction")
}
}

View File

@@ -0,0 +1,106 @@
package inr.numass.scripts.underflow
import groovy.transform.CompileStatic
import hep.dataforge.tables.ListTable
import hep.dataforge.tables.Table
import hep.dataforge.tables.TableTransform
import hep.dataforge.tables.ValueMap
import hep.dataforge.values.Values
import inr.numass.data.api.NumassAnalyzer
import org.apache.commons.math3.analysis.ParametricUnivariateFunction
import org.apache.commons.math3.exception.DimensionMismatchException
import org.apache.commons.math3.fitting.SimpleCurveFitter
import org.apache.commons.math3.fitting.WeightedObservedPoint
import java.util.stream.Collectors
import static inr.numass.data.api.NumassAnalyzer.CHANNEL_KEY
import static inr.numass.data.api.NumassAnalyzer.COUNT_RATE_KEY
@CompileStatic
class UnderflowFitter {
private static String[] pointNames = ["U", "amp", "expConst", "correction"];
static Values fitPoint(Double voltage, Table spectrum, int xLow, int xHigh, int upper, int binning) {
double norm = spectrum.getRows().filter { Values row ->
int channel = row.getInt(CHANNEL_KEY);
return channel > xLow && channel < upper;
}.mapToDouble { it.getValue(COUNT_RATE_KEY).doubleValue() }.sum();
double[] fitRes = getUnderflowExpParameters(spectrum, xLow, xHigh, binning);
double a = fitRes[0];
double sigma = fitRes[1];
return ValueMap.of(pointNames, voltage, a, sigma, a * sigma * Math.exp(xLow / sigma) / norm + 1d);
}
static Table fitAllPoints(Map<Double, Table> data, int xLow, int xHigh, int upper, int binning) {
ListTable.Builder builder = new ListTable.Builder(pointNames);
data.each { voltage, spectrum -> builder.row(fitPoint(voltage, spectrum, xLow, xHigh, upper, binning)) }
return builder.build();
}
/**
* Calculate underflow exponent parameters using (xLow, xHigh) window for
* extrapolation
*
* @param xLow
* @param xHigh
* @return
*/
private static double[] getUnderflowExpParameters(Table spectrum, int xLow, int xHigh, int binning) {
try {
if (xHigh <= xLow) {
throw new IllegalArgumentException("Wrong borders for underflow calculation");
}
Table binned = TableTransform.filter(
NumassAnalyzer.spectrumWithBinning(spectrum, binning),
CHANNEL_KEY,
xLow,
xHigh
);
List<WeightedObservedPoint> points = binned.getRows()
.map {
new WeightedObservedPoint(
1d,//1d / p.getValue() , //weight
it.getDouble(CHANNEL_KEY), // x
it.getDouble(COUNT_RATE_KEY) / binning) //y
}
.collect(Collectors.toList());
SimpleCurveFitter fitter = SimpleCurveFitter.create(new ExponentFunction(), [1d, 200d] as double[])
return fitter.fit(points);
} catch (Exception ex) {
return [0, 0] as double[];
}
}
/**
* Exponential function for fitting
*/
private static class ExponentFunction implements ParametricUnivariateFunction {
@Override
public double value(double x, double ... parameters) {
if (parameters.length != 2) {
throw new DimensionMismatchException(parameters.length, 2);
}
double a = parameters[0];
double sigma = parameters[1];
//return a * (Math.exp(x / sigma) - 1);
return a * Math.exp(x / sigma);
}
@Override
public double[] gradient(double x, double ... parameters) {
if (parameters.length != 2) {
throw new DimensionMismatchException(parameters.length, 2);
}
double a = parameters[0];
double sigma = parameters[1];
return [Math.exp(x / sigma), -a * x / sigma / sigma * Math.exp(x / sigma)] as double[]
}
}
}
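
The correction returned by fitPoint follows from extrapolating the fitted exponential below the fit window: if the count-rate density is y(x) = a e^{x/sigma}, the rate missed below x_low is

$$ \int_{-\infty}^{x_{\mathrm{low}}} a\,e^{x/\sigma}\,dx = a\,\sigma\,e^{x_{\mathrm{low}}/\sigma}, $$

so dividing by the observed rate norm in (xLow, upper) and adding one gives the multiplicative factor a * sigma * Math.exp(xLow / sigma) / norm + 1d in the code (the lower limit of negative infinity, rather than channel 0, matches the code, which drops the e^{0/\sigma} = 1 term). The analytic gradient in ExponentFunction is consistent with this model:

$$ \frac{\partial y}{\partial a} = e^{x/\sigma}, \qquad \frac{\partial y}{\partial \sigma} = -\frac{a\,x}{\sigma^2}\,e^{x/\sigma}. $$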

View File

@@ -120,7 +120,7 @@ public class MergeDataAction extends ManyToOneAction<Table, Table> {
         // absolute errors add in quadrature
         double crErr = Math.sqrt(err1 * err1 * t1 * t1 + err2 * err2 * t2 * t2) / time;

-        ValueMap.Builder map = new ValueMap(parnames, Uset, Uread, time, total, wind, cr, crErr).builder();
+        ValueMap.Builder map = ValueMap.of(parnames, Uset, Uread, time, total, wind, cr, crErr).builder();

         if (dp1.getNames().contains("relCR") && dp2.getNames().contains("relCR")) {
             double relCR = (dp1.getDouble("relCR") + dp2.getDouble("relCR")) / 2;
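
The quadrature line in this hunk is standard error propagation for a live-time-weighted average of two count rates: assuming time = t1 + t2 and cr = (cr1 * t1 + cr2 * t2) / time (the cr line itself is outside the excerpt),

$$ \sigma_{cr} = \frac{\sqrt{\sigma_1^2 t_1^2 + \sigma_2^2 t_2^2}}{t_1 + t_2}, $$

which is exactly Math.sqrt(err1 * err1 * t1 * t1 + err2 * err2 * t2 * t2) / time.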

View File

@@ -95,7 +95,7 @@ public class SummaryAction extends ManyToOneAction<FitState, Table> {
                 weights[i] += weight;
             }
             values[values.length - 1] = Value.of(state.getChi2());
-            Values point = new ValueMap(names, values);
+            Values point = ValueMap.of(names, (Object[]) values);
             res.row(point);
         });
@@ -108,7 +108,7 @@ public class SummaryAction extends ManyToOneAction<FitState, Table> {
             averageValues[2 * i + 2] = Value.of(1 / Math.sqrt(weights[i]));
         }
-        res.row(new ValueMap(names, averageValues));
+        res.row(ValueMap.of(names, (Object[]) averageValues));

         return res.build();
     }
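
The second hunk reads as the closing step of an inverse-variance weighted mean: with accumulated weights w_k = 1/sigma_k^2 (the accumulation is outside the excerpt, so this form is an inference), the weighted average and its uncertainty are

$$ \bar{x} = \frac{\sum_k w_k x_k}{\sum_k w_k}, \qquad \sigma_{\bar{x}} = \frac{1}{\sqrt{\sum_k w_k}}, $$

matching Value.of(1 / Math.sqrt(weights[i])).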

View File

@@ -37,7 +37,7 @@ public class DataModelUtils {
         for (int i = 0; i < numpoints; i++) {
             // the formula works even when the point order is reversed
             double x = from + (to - from) / (numpoints - 1) * i;
-            Values point = new ValueMap(list, x, time);
+            Values point = ValueMap.of(list, x, time);
             res.row(point);
         }
@@ -51,7 +51,7 @@ public class DataModelUtils {
         while (scan.hasNextLine()) {
             double x = scan.nextDouble();
             int time = scan.nextInt();
-            res.row(new ValueMap(list, x, time));
+            res.row(ValueMap.of(list, x, time));
         }
         return res.build();
     }

View File

@@ -51,7 +51,7 @@ public class OldDataReader {
             if (lineScan.hasNextDouble()) {
                 ushift = lineScan.nextDouble();
             }
-            Values point = new ValueMap(list, u, time, ushift);
+            Values point = ValueMap.of(list, u, time, ushift);
             res.row(point);
         }
         return res.build();

View File

@@ -29,6 +29,7 @@ import static inr.numass.data.api.NumassAnalyzer.COUNT_RATE_KEY;
  *
  * @author <a href="mailto:altavir@gmail.com">Alexander Nozik</a>
  */
+@Deprecated
 public class UnderflowCorrection {

     private NumassAnalyzer analyzer;
@@ -83,7 +84,7 @@ public class UnderflowCorrection {
         double a = fitRes[0];
         double sigma = fitRes[1];
-        return new ValueMap(pointNames, point.getVoltage(), a, sigma, a * sigma * Math.exp(xLow / sigma) / norm + 1d);
+        return ValueMap.of(pointNames, point.getVoltage(), a, sigma, a * sigma * Math.exp(xLow / sigma) / norm + 1d);
     }

     public Table fitAllPoints(Iterable<NumassPoint> data, int xLow, int xHigh, int upper, int binning) {