Working on histograms and TableFormat column roles. Removed DataPoint and replaced it with Values

Alexander Nozik 2017-07-02 22:39:28 +03:00
parent 7af5bc1d8d
commit c75458515d
14 changed files with 30 additions and 29 deletions
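
The recurring pattern in the hunks below is the DataPoint-to-Values replacement: table rows are plain Values, column presence is checked through getNames() and cells are read through getValue()/getDouble(). A minimal sketch of that row-access style, assuming the hep.dataforge.tables.Table import path and a hypothetical averageColumn helper; only calls that appear in this commit (iterating a Table over Values rows, getNames().contains(...), getDouble(...)) are used.

    import hep.dataforge.tables.Table;   // import path assumed for this sketch
    import hep.dataforge.values.Values;

    public class ValuesRowSketch {
        // Hypothetical helper: average a named column over all rows, skipping
        // rows that do not carry the column. Rows are accessed through the
        // Values interface that replaces DataPoint in this commit.
        static double averageColumn(Table table, String column) {
            double sum = 0;
            int count = 0;
            for (Values row : table) {                   // a Table iterates over Values rows
                if (row.getNames().contains(column)) {   // column check, as in MergeDataAction
                    sum += row.getDouble(column);
                    count++;
                }
            }
            return count == 0 ? Double.NaN : sum / count;
        }
    }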

View File

@@ -8,6 +8,7 @@ package inr.numass.scripts
import hep.dataforge.grind.Grind
import hep.dataforge.values.Values
import inr.numass.data.NumassPoint
import inr.numass.data.NumassPointImpl
import inr.numass.data.RawNMPoint
import inr.numass.storage.NumassDataLoader
@@ -43,18 +44,18 @@ def data = NumassDataLoader.fromLocalDir(null, dataDir).getNMPoints()
//)
//Simulation process
-Map<String, List<NumassPointImpl>> res = [:]
+Map<String, List<NumassPoint>> res = [:]
-List<NumassPointImpl> generated = new ArrayList<>();
-List<NumassPointImpl> registered = new ArrayList<>();
-List<NumassPointImpl> firstIteration = new ArrayList<>();
-List<NumassPointImpl> secondIteration = new ArrayList<>();
-List<NumassPointImpl> pileup = new ArrayList<>();
+List<NumassPoint> generated = new ArrayList<>();
+List<NumassPoint> registered = new ArrayList<>();
+List<NumassPoint> firstIteration = new ArrayList<>();
+List<NumassPoint> secondIteration = new ArrayList<>();
+List<NumassPoint> pileup = new ArrayList<>();
lowerChannel = 400;
upperChannel = 1800;
-PileUpSimulator buildSimulator(NumassPointImpl point, double cr, NumassPointImpl reference = null, boolean extrapolate = true, double scale = 1d) {
+PileUpSimulator buildSimulator(NumassPointImpl point, double cr, NumassPoint reference = null, boolean extrapolate = true, double scale = 1d) {
def cfg = Grind.buildMeta(cr: cr) {
pulser(mean: 3450, sigma: 86.45, freq: 66.43)
}
@@ -88,7 +89,7 @@ PileUpSimulator buildSimulator(NumassPointImpl point, double cr, NumassPointImpl
return new PileUpSimulator(point.length * scale, rnd, generator).withUset(point.voltage).generate();
}
-static double adjustCountRate(PileUpSimulator simulator, NumassPointImpl point) {
+double adjustCountRate(PileUpSimulator simulator, NumassPointImpl point) {
double generatedInChannel = simulator.generated().getCountInWindow(lowerChannel, upperChannel);
double registeredInChannel = simulator.registered().getCountInWindow(lowerChannel, upperChannel);
return (generatedInChannel / registeredInChannel) * (point.getCountInWindow(lowerChannel, upperChannel) / point.getLength());
@@ -100,7 +101,7 @@ data.forEach { point ->
PileUpSimulator simulator = buildSimulator(point, cr);
//second iteration to exclude pileup overlap
-NumassPointImpl pileupPoint = simulator.pileup();
+NumassPoint pileupPoint = simulator.pileup();
firstIteration.add(simulator.registered());
//updating count rate

View File

@@ -124,7 +124,7 @@ public class MergeDataAction extends ManyToOneAction<Table, Table> {
ValueMap.Builder map = new ValueMap(parnames, Uset, Uread, time, total, wind, cr, crErr).builder();
-if (dp1.names().contains("relCR") && dp2.names().contains("relCR")) {
+if (dp1.getNames().contains("relCR") && dp2.getNames().contains("relCR")) {
double relCR = (dp1.getDouble("relCR") + dp2.getDouble("relCR")) / 2;
map.putValue("relCR", relCR);
map.putValue("relCRerr", crErr * relCR / cr);
@@ -137,7 +137,7 @@ public class MergeDataAction extends ManyToOneAction<Table, Table> {
//Merge all points into a single data set
Map<Double, List<Values>> points = new LinkedHashMap<>();
for (Table d : ds) {
-if (!d.getFormat().names().contains(parnames)) {
+if (!d.getFormat().getNames().contains(parnames)) {
throw new IllegalArgumentException();
}
for (Values dp : d) {

View File

@@ -98,7 +98,7 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
double pointErr = dp.getValue("CRerr").doubleValue() / getCR(dp);
double err = Math.sqrt(corrErr * corrErr + pointErr * pointErr) * getCR(dp);
-if (dp.names().contains("Monitor")) {
+if (dp.getNames().contains("Monitor")) {
pb.putValue("Monitor", Value.of(dp.getValue("Monitor").doubleValue() / corrFactor));
} else {
pb.putValue("Monitor", corrFactor);
@@ -109,7 +109,7 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
pb.putValue("CRerr", Value.of(err));
} else {
double corrFactor = dp.getValue("CR").doubleValue() / norm;
-if (dp.names().contains("Monitor")) {
+if (dp.getNames().contains("Monitor")) {
pb.putValue("Monitor", Value.of(dp.getValue("Monitor").doubleValue() / corrFactor));
} else {
pb.putValue("Monitor", corrFactor);

View File

@@ -62,8 +62,8 @@ public class SpectrumInformation {
if (names.length == 0) {
names = source.namesAsArray();
}
-assert source.names().contains(set.namesAsArray());
-assert source.names().contains(names);
+assert source.getNames().contains(set.namesAsArray());
+assert source.getNames().contains(names);
RealMatrix res = new Array2DRowRealMatrix(names.length, names.length);
for (Values dp : data) {
@@ -84,14 +84,14 @@
}
public NamedMatrix getPointInfoMatrix(Values set, double x, double t, String... parNames) {
-assert source.names().contains(set.namesAsArray());
+assert source.getNames().contains(set.namesAsArray());
String[] names = parNames;
if (names.length == 0) {
names = set.namesAsArray();
}
-assert source.names().contains(names);
+assert source.getNames().contains(names);
RealMatrix res = new Array2DRowRealMatrix(names.length, names.length);

View File

@@ -25,7 +25,7 @@ import org.apache.commons.math3.analysis.UnivariateFunction;
import org.apache.commons.math3.analysis.integration.SimpsonIntegrator;
import org.apache.commons.math3.analysis.integration.UnivariateIntegrator;
-import static hep.dataforge.names.NamedUtils.combineNamesWithEquals;
+import static hep.dataforge.names.NamesUtils.combineNamesWithEquals;
import static java.lang.Double.isNaN;
import static java.lang.Math.*;

View File

@@ -81,7 +81,7 @@ public class GaussSourceSpectrum extends AbstractParametricFunction implements R
@Override
public boolean providesDeriv(String name) {
-return this.names().contains(name);
+return this.getNames().contains(name);
}
@Override

View File

@@ -93,7 +93,7 @@ public class GunSpectrum extends AbstractParametricFunction {
@Override
public boolean providesDeriv(String name) {
// return false;
-return this.names().contains(name);
+return this.getNames().contains(name);
}
double transmissionValue(double U, double E, double resA, double resB) {

View File

@@ -45,7 +45,7 @@ public class GunTailSpectrum implements RangedNamedSetSpectrum {
}
@Override
-public Names names() {
+public Names getNames() {
return Names.of(list);
}

View File

@@ -174,7 +174,7 @@ public class LossCalculator {
UnivariateFunction scatterFunction = getSingleScatterFunction(exPos, ionPos, exW, ionW, exIonRatio);
-if (set.names().contains("X")) {
+if (set.getNames().contains("X")) {
final LossCalculator loss = LossCalculator.instance;
final List<Double> probs = loss.getGunLossProbabilities(set.getDouble("X"));
UnivariateFunction single = (double e) -> probs.get(1) * scatterFunction.value(e);

View File

@@ -15,7 +15,7 @@
*/
package inr.numass.models;
-import hep.dataforge.names.NamedUtils;
+import hep.dataforge.names.NamesUtils;
import hep.dataforge.stat.parametric.AbstractParametricFunction;
import hep.dataforge.stat.parametric.ParametricFunction;
import hep.dataforge.values.ValueProvider;
@@ -54,7 +54,7 @@ public class ModularSpectrum extends AbstractParametricFunction {
* @param cacheMax - upper bound of the cache.
*/
public ModularSpectrum(RangedNamedSetSpectrum source, BivariateFunction resolution, double cacheMin, double cacheMax) {
-super(NamedUtils.combineNamesWithEquals(list, source.namesAsArray()));
+super(NamesUtils.combineNamesWithEquals(list, source.namesAsArray()));
if (cacheMin >= cacheMax) {
throw new IllegalArgumentException();
}
@@ -163,7 +163,7 @@ public class ModularSpectrum extends AbstractParametricFunction {
case "trap":
return this.trappingCache.value(U, set);
default:
-if (sourceSpectrum.names().contains(parName)) {
+if (sourceSpectrum.getNames().contains(parName)) {
List<Double> probs = calculator.getLossProbabilities(X);
updateScatterCache(probs.size() - 1);
double sum = 0;

View File

@@ -21,7 +21,7 @@ import hep.dataforge.utils.MultiCounter;
import hep.dataforge.values.ValueProvider;
import hep.dataforge.values.Values;
-import static hep.dataforge.names.NamedUtils.combineNamesWithEquals;
+import static hep.dataforge.names.NamesUtils.combineNamesWithEquals;
/**
*

View File

@@ -110,7 +110,7 @@ public class NamedSpectrumCaching extends AbstractParametricFunction {
}
protected boolean sameSet(Values set1, Values set2) {
-for (String name : this.names()) {
+for (String name : this.getNames()) {
if (!Objects.equals(set1.getDouble(name), set2.getDouble(name))) {
return false;
}

View File

@@ -70,7 +70,7 @@ public class NumassTableFilterTask extends SingleActionTask<Table, Table> {
private Map<String, Object> unbox(Values dp) {
Map<String, Object> res = new HashMap<>();
-for (String field : dp.names()) {
+for (String field : dp.getNames()) {
Value val = dp.getValue(field);
Object obj;
switch (val.valueType()) {

View File

@@ -148,7 +148,7 @@ public class MspViewController implements Encapsulated {
private Collection<String> joinNames(List<PointLoader> loaders) {
Set<String> nameSet = new TreeSet<>(new AlphanumComparator());
for (PointLoader loader : loaders) {
-nameSet.addAll(loader.getFormat().names().asList());
+nameSet.addAll(loader.getFormat().getNames().asList());
}
return nameSet;