Identity to MetaMorph

Alexander Nozik 2017-02-25 11:13:05 +03:00
commit 134e5c771d
4 changed files with 25 additions and 18 deletions

View File

@@ -6,23 +6,26 @@
package inr.numass.scripts
import hep.dataforge.io.ColumnedDataWriter
import hep.dataforge.storage.commons.StorageUtils
import hep.dataforge.tables.Table
import inr.numass.storage.NMPoint
import inr.numass.storage.NumassDataUtils
import inr.numass.storage.NumassStorage
import inr.numass.utils.UnderflowCorrection
-File rootDir = new File("D:\\temp\\2016-sample\\")
+File rootDir = new File("D:\\Work\\Numass\\data\\2016_10\\Fill_1")
NumassStorage storage = NumassStorage.buildLocalNumassRoot(rootDir, true);
Collection<NMPoint> data = NumassDataUtils.joinSpectra(
-StorageUtils.loaderStream(storage).filter { it.key.matches("set_.*") }.map {
+StorageUtils.loaderStream(storage).filter { it.key.matches("set_.{2,3}") }.map {
println "loading ${it.key}"
it.value
}
)
-data = NumassDataUtils.substractReferencePoint(data, 16050d);
+data = NumassDataUtils.substractReferencePoint(data, 18600d);
//println "Empty files:"
//Collection<NMPoint> emptySpectra = NumassDataUtils.joinSpectra(
@@ -74,10 +77,10 @@ def printPoint(Iterable<NMPoint> data, List us, int binning = 20, normalize = fa
println "\n# spectra\n"
-printPoint(data, [16550d, 17050d, 17550d])
+printPoint(data, [16200d, 16400d, 16800d, 17000d, 17200d])
println()
-//Table t = new UnderflowCorrection().fitAllPoints(data, 400, 700, 3100, 20);
-//ColumnedDataWriter.writeDataSet(System.out, t, "underflow parameters")
+Table t = new UnderflowCorrection().fitAllPoints(data, 400, 650, 3100, 20);
+ColumnedDataWriter.writeDataSet(System.out, t, "underflow parameters")
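
Note on the loader filter change above: the new pattern "set_.{2,3}" requires the whole key to be "set_" followed by exactly two or three characters, whereas "set_.*" accepted anything after the prefix. A minimal Groovy check (the key names here are purely illustrative, not taken from the storage):

// Illustrative keys only; String.matches() tests the whole string against the pattern.
["set_1", "set_42", "set_123", "set_1234"].each { key ->
    println "${key} -> ${key.matches("set_.{2,3}")}"
}
// prints: set_1 -> false, set_42 -> true, set_123 -> true, set_1234 -> false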

View File

@@ -10,8 +10,7 @@ import hep.dataforge.context.Context;
import hep.dataforge.data.DataFilter;
import hep.dataforge.data.DataNode;
import hep.dataforge.data.DataTree;
-import hep.dataforge.description.DescriptorBuilder;
-import hep.dataforge.description.NodeDescriptor;
+import hep.dataforge.description.NodeDef;
import hep.dataforge.meta.Meta;
import hep.dataforge.meta.Template;
import hep.dataforge.tables.Table;
@@ -27,6 +26,10 @@ import inr.numass.storage.NumassData;
*
* @author Alexander Nozik
*/
+@NodeDef(name = "prepare")
+@NodeDef(name = "monitor")
+@NodeDef(name = "merge")
+@NodeDef(name = "data")
public class NumassPrepareTask extends AbstractTask<Table> {
@Override
@@ -167,12 +170,12 @@ public class NumassPrepareTask extends AbstractTask<Table> {
return "prepare";
}
-@Override
-public NodeDescriptor getDescriptor() {
-return new DescriptorBuilder(getName())
-.addNode("prepare", PrepareDataAction.class)
-.addNode("monitor", MonitorCorrectAction.class)
-.addNode("merge", MergeDataAction.class)
-.build();
-}
+// @Override
+// public NodeDescriptor getDescriptor() {
+// return new DescriptorBuilder(getName())
+// .addNode("prepare", PrepareDataAction.class)
+// .addNode("monitor", MonitorCorrectAction.class)
+// .addNode("merge", MergeDataAction.class)
+// .build();
+// }
}

View File

@@ -21,7 +21,6 @@ import static java.lang.Math.max;
* @author <a href="mailto:altavir@gmail.com">Alexander Nozik</a>
*/
public class PileUpSimulator {
private final static double us = 1e-6;//microsecond
private final double pointLength;
private final RandomGenerator rnd;

View File

@@ -26,6 +26,8 @@ import java.util.stream.Collectors;
*/
public class UnderflowCorrection {
+private final static int CUTOFF = -200;
public double get(Logable log, Meta meta, NMPoint point) {
if (point.getUset() >= meta.getDouble("underflow.threshold", 17000)) {
if (meta.hasValue("underflow.function")) {
@@ -68,7 +70,7 @@ public class UnderflowCorrection {
double sigma = fitRes[1];
//builder.row(point.getUset(), a, sigma, (a * sigma * (Math.exp(xLow / sigma) - 1) - a*xLow) / norm + 1d);
-builder.row(point.getUset(), a, sigma, a * sigma * (Math.exp(xLow / sigma)) / norm + 1d);
+builder.row(point.getUset(), a, sigma, a * sigma * (Math.exp(xLow / sigma) - Math.exp(CUTOFF / sigma)) / norm + 1d);
}
return builder.build();
}
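
Note on the corrected underflow formula above: assuming the fitted tail below xLow is an exponential a*e^(x/sigma) and that norm is the normalization count N of the point, the new expression corresponds to integrating that tail from the CUTOFF channel up to xLow, whereas the previous expression amounts to a lower limit of minus infinity (the e^(CUTOFF/sigma) term was simply dropped). A sketch of the assumed derivation:

\[
\text{correction} = 1 + \frac{1}{N}\int_{\mathrm{CUTOFF}}^{x_{\mathrm{low}}} a\, e^{x/\sigma}\, dx
                  = 1 + \frac{a\,\sigma\left(e^{x_{\mathrm{low}}/\sigma} - e^{\mathrm{CUTOFF}/\sigma}\right)}{N}
\]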