Working on histograms and TableFormat column roles. Removed DataPoint and replaced it with Values.
parent 7f89c69643
commit 7af5bc1d8d
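The substitution is mechanical throughout the diff: the removed `hep.dataforge.tables.DataPoint` type becomes `hep.dataforge.values.Values` in signatures and generics, and the `MapPoint` implementation becomes `ValueMap` at construction sites. A minimal sketch of the pattern, assuming only the `ValueMap.Builder`/`putValue` API visible in the hunks below (the column name and the `build()` call are illustrative, not taken from this commit):

```java
import hep.dataforge.tables.ValueMap;
import hep.dataforge.values.Values;

class MigrationSketch {
    // Old: DataPoint dp = new MapPoint.Builder().putValue("Uset", 18500.0).build();
    // New: rows are typed as the Values interface and built with ValueMap.Builder.
    static Values examplePoint() {
        ValueMap.Builder builder = new ValueMap.Builder();
        builder.putValue("Uset", 18500.0); // "Uset" is a column name used in the hunks below
        return builder.build();            // assumption: Builder.build() returns a ValueMap
    }
}
```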
@@ -24,8 +24,8 @@ import hep.dataforge.meta.Meta;
 import hep.dataforge.meta.MetaBuilder;
 import hep.dataforge.storage.commons.MessageFactory;
 import hep.dataforge.storage.commons.StorageUtils;
-import hep.dataforge.tables.DataPoint;
 import hep.dataforge.values.Value;
+import hep.dataforge.values.Values;
 import inr.numass.storage.NumassStorage;
 import org.slf4j.LoggerFactory;
 import org.zeroturnaround.zip.ZipUtil;
@@ -251,7 +251,7 @@ public class NumassClient implements AutoCloseable, Responder {
      * @param points
      * @return
      */
-    public Envelope sendDataPoints(String shelf, String loaderName, Collection<DataPoint> points) {
+    public Envelope sendDataPoints(String shelf, String loaderName, Collection<Values> points) {
         throw new UnsupportedOperationException();
     }
 
@@ -34,10 +34,10 @@ import hep.dataforge.meta.Meta;
 import hep.dataforge.storage.api.PointLoader;
 import hep.dataforge.storage.api.Storage;
 import hep.dataforge.storage.commons.LoaderFactory;
-import hep.dataforge.tables.DataPoint;
 import hep.dataforge.tables.TableFormat;
 import hep.dataforge.tables.TableFormatBuilder;
 import hep.dataforge.utils.DateTimeUtils;
+import hep.dataforge.values.Values;
 import inr.numass.control.StorageHelper;
 
 import java.time.Duration;
@@ -136,7 +136,7 @@ public class PKT8Device extends PortSensor<PKT8Result> {
         collector = new RegularPointCollector(
                 duration,
                 channels.values().stream().map(PKT8Channel::getName).collect(Collectors.toList()),
-                (DataPoint dp) -> {
+                (Values dp) -> {
                     getLogger().debug("Point measurement complete. Pushing...");
                     storageHelper.push(dp);
                 });
@@ -39,11 +39,11 @@ import hep.dataforge.meta.Meta;
 import hep.dataforge.storage.api.PointLoader;
 import hep.dataforge.storage.api.Storage;
 import hep.dataforge.storage.commons.LoaderFactory;
-import hep.dataforge.tables.DataPoint;
 import hep.dataforge.tables.TableFormat;
 import hep.dataforge.tables.TableFormatBuilder;
 import hep.dataforge.utils.DateTimeUtils;
 import hep.dataforge.values.Value;
+import hep.dataforge.values.Values;
 import inr.numass.control.StorageHelper;
 
 import java.time.Duration;
@@ -61,7 +61,7 @@ import java.util.function.Consumer;
 @StateDef(value = @ValueDef(name = "filament", info = "The number of filament in use"), writable = true)
 @StateDef(value = @ValueDef(name = "filamentOn", info = "Mass-spectrometer filament on"), writable = true)
 @StateDef(@ValueDef(name = "filamentStatus", info = "Filament status"))
-public class MspDevice extends Sensor<DataPoint> implements PortHandler.PortController {
+public class MspDevice extends Sensor<Values> implements PortHandler.PortController {
     public static final String MSP_DEVICE_TYPE = "msp";
 
     private static final Duration TIMEOUT = Duration.ofMillis(200);
@@ -409,7 +409,7 @@ public class MspDevice extends Sensor<DataPoint> implements PortHandler.PortCont
         }
     }
 
-    public class PeakJumpMeasurement extends AbstractMeasurement<DataPoint> implements Consumer<MspResponse> {
+    public class PeakJumpMeasurement extends AbstractMeasurement<Values> implements Consumer<MspResponse> {
 
         private RegularPointCollector collector = new RegularPointCollector(getAveragingDuration(), this::result);
         private StorageHelper helper = new StorageHelper(MspDevice.this, this::makeLoader);
@@ -499,7 +499,7 @@ public class MspDevice extends Sensor<DataPoint> implements PortHandler.PortCont
         }
 
         @Override
-        protected synchronized void result(DataPoint result, Instant time) {
+        protected synchronized void result(Values result, Instant time) {
             super.result(result, time);
             helper.push(result);
         }
 
@@ -4,7 +4,7 @@ import hep.dataforge.control.connections.StorageConnection
 import hep.dataforge.control.devices.AbstractDevice
 import hep.dataforge.exceptions.StorageException
 import hep.dataforge.storage.api.PointLoader
-import hep.dataforge.tables.DataPoint
+import hep.dataforge.values.Values
 import java.util.*
 import java.util.function.Function
 
@@ -15,7 +15,7 @@ import java.util.function.Function
 class StorageHelper(private val device: AbstractDevice, private val loaderFactory: Function<StorageConnection, PointLoader>) : AutoCloseable {
     private val loaderMap = HashMap<StorageConnection, PointLoader>()
 
-    fun push(point: DataPoint) {
+    fun push(point: Values) {
         if (!device.hasState("storing") || device.getState("storing").booleanValue()) {
             device.forEachConnection("storage", StorageConnection::class.java) { connection ->
                 val pl = loaderMap.computeIfAbsent(connection, loaderFactory)
 
@@ -21,12 +21,12 @@ import hep.dataforge.exceptions.ControlException;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.storage.api.PointLoader;
 import hep.dataforge.storage.commons.LoaderFactory;
-import hep.dataforge.tables.DataPoint;
-import hep.dataforge.tables.MapPoint;
 import hep.dataforge.tables.TableFormatBuilder;
+import hep.dataforge.tables.ValueMap;
 import hep.dataforge.utils.DateTimeUtils;
 import hep.dataforge.values.Value;
 import hep.dataforge.values.ValueType;
+import hep.dataforge.values.Values;
 import inr.numass.control.StorageHelper;
 
 import java.time.Duration;
@@ -50,7 +50,7 @@ import static hep.dataforge.control.devices.PortSensor.CONNECTED_STATE;
         value = @ValueDef(name = "storing", info = "Define if this device is currently writes to storage"),
         writable = true
 )
-public class VacCollectorDevice extends Sensor<DataPoint> {
+public class VacCollectorDevice extends Sensor<Values> {
 
     private Map<String, Sensor<Double>> sensorMap = new LinkedHashMap<>();
     private StorageHelper helper = new StorageHelper(VacCollectorDevice.this, this::buildLoader);
@@ -86,7 +86,7 @@ public class VacCollectorDevice extends Sensor<DataPoint> {
     //TODO add dot path notation for states
 
     @Override
-    protected Measurement<DataPoint> createMeasurement() {
+    protected Measurement<Values> createMeasurement() {
         //TODO use meta
         return new VacuumMeasurement();
     }
@@ -132,7 +132,7 @@ public class VacCollectorDevice extends Sensor<DataPoint> {
         return Duration.parse(meta().getString("averagingDuration", "PT30S"));
     }
 
-    private class VacuumMeasurement extends AbstractMeasurement<DataPoint> {
+    private class VacuumMeasurement extends AbstractMeasurement<Values> {
 
         private final ValueCollector collector = new RegularPointCollector(getAveragingDuration(), this::result);
         private ScheduledExecutorService executor;
@@ -167,13 +167,13 @@ public class VacCollectorDevice extends Sensor<DataPoint> {
 
 
         @Override
-        protected synchronized void result(DataPoint result, Instant time) {
+        protected synchronized void result(Values result, Instant time) {
             super.result(result, time);
             helper.push(result);
         }
 
-        private DataPoint terminator() {
-            MapPoint.Builder p = new MapPoint.Builder();
+        private Values terminator() {
+            ValueMap.Builder p = new ValueMap.Builder();
             p.putValue("timestamp", DateTimeUtils.now());
             sensorMap.keySet().forEach((n) -> {
                 p.putValue(n, null);
 
@@ -1,8 +1,8 @@
 package inr.numass.data;
 
-import hep.dataforge.tables.DataPoint;
 import hep.dataforge.tables.ListTable;
 import hep.dataforge.tables.Table;
+import hep.dataforge.values.Values;
 
 import java.time.Instant;
 import java.util.Arrays;
@@ -81,7 +81,7 @@ public class NumassDataUtils {
     public static Table setHVScale(ListTable data, double beta) {
         SpectrumDataAdapter reader = adapter();
         ListTable.Builder res = new ListTable.Builder(data.getFormat());
-        for (DataPoint dp : data) {
+        for (Values dp : data) {
             double corrFactor = 1 + beta;
             res.row(reader.buildSpectrumDataPoint(reader.getX(dp).doubleValue() * corrFactor, reader.getCount(dp), reader.getTime(dp)));
         }
@@ -106,7 +106,7 @@ public class NumassDataUtils {
     public static Table correctForDeadTime(ListTable data, SpectrumDataAdapter adapter, double dtime) {
         // SpectrumDataAdapter adapter = adapter();
         ListTable.Builder res = new ListTable.Builder(data.getFormat());
-        for (DataPoint dp : data) {
+        for (Values dp : data) {
             double corrFactor = 1 / (1 - dtime * adapter.getCount(dp) / adapter.getTime(dp));
             res.row(adapter.buildSpectrumDataPoint(adapter.getX(dp).doubleValue(), (long) (adapter.getCount(dp) * corrFactor), adapter.getTime(dp)));
         }
 
@@ -1,7 +1,7 @@
 package inr.numass.data;
 
-import hep.dataforge.tables.DataPoint;
-import hep.dataforge.tables.MapPoint;
+import hep.dataforge.tables.ValueMap;
+import hep.dataforge.values.Values;
 
 import java.time.Instant;
 import java.util.LinkedHashMap;
@@ -21,7 +21,7 @@ public interface NumassPoint {
 
     int getCountInWindow(int from, int to);
 
-    List<DataPoint> getData();
+    List<Values> getData();
 
     long getTotalCount();
 
@@ -49,9 +49,9 @@ public interface NumassPoint {
         return res;
     }
 
-    default List<DataPoint> getData(int binning, boolean normalize) {
+    default List<Values> getData(int binning, boolean normalize) {
         return getMap(binning, normalize).entrySet().stream()
-                .map(entry -> new MapPoint(dataNames, entry.getKey(), entry.getValue()))
+                .map(entry -> new ValueMap(dataNames, entry.getKey(), entry.getValue()))
                 .collect(Collectors.toList());
     }
 
@@ -15,8 +15,8 @@
  */
 package inr.numass.data;
 
-import hep.dataforge.tables.DataPoint;
-import hep.dataforge.tables.MapPoint;
+import hep.dataforge.tables.ValueMap;
+import hep.dataforge.values.Values;
 
 import java.time.Instant;
 import java.util.ArrayList;
@@ -72,10 +72,10 @@ public class NumassPointImpl implements NumassPoint {
     }
 
     @Override
-    public List<DataPoint> getData() {
-        List<DataPoint> data = new ArrayList<>();
+    public List<Values> getData() {
+        List<Values> data = new ArrayList<>();
         for (int i = 0; i < RawNMPoint.MAX_CHANEL; i++) {
-            data.add(new MapPoint(dataNames, i, spectrum[i]));
+            data.add(new ValueMap(dataNames, i, spectrum[i]));
         }
         return data;
     }
 
@@ -19,11 +19,11 @@ import hep.dataforge.exceptions.DataFormatException;
 import hep.dataforge.exceptions.NameNotFoundException;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.meta.MetaBuilder;
-import hep.dataforge.tables.DataPoint;
-import hep.dataforge.tables.MapPoint;
 import hep.dataforge.tables.PointAdapter;
+import hep.dataforge.tables.ValueMap;
 import hep.dataforge.tables.XYAdapter;
 import hep.dataforge.values.Value;
+import hep.dataforge.values.Values;
 
 /**
  *
@@ -59,29 +59,29 @@ public class SpectrumDataAdapter extends XYAdapter {
         );
     }
 
-    public double getTime(DataPoint point) {
+    public double getTime(Values point) {
         return this.getFrom(point, POINT_LENGTH_NAME, 1d).doubleValue();
     }
 
-    public DataPoint buildSpectrumDataPoint(double x, long count, double t) {
-        return new MapPoint(new String[]{nameFor(X_VALUE_KEY), nameFor(Y_VALUE_KEY),
+    public Values buildSpectrumDataPoint(double x, long count, double t) {
+        return new ValueMap(new String[]{nameFor(X_VALUE_KEY), nameFor(Y_VALUE_KEY),
             nameFor(POINT_LENGTH_NAME)},
                 x, count, t);
     }
 
-    public DataPoint buildSpectrumDataPoint(double x, long count, double countErr, double t) {
-        return new MapPoint(new String[]{nameFor(X_VALUE_KEY), nameFor(Y_VALUE_KEY),
+    public Values buildSpectrumDataPoint(double x, long count, double countErr, double t) {
+        return new ValueMap(new String[]{nameFor(X_VALUE_KEY), nameFor(Y_VALUE_KEY),
             nameFor(Y_ERROR_KEY), nameFor(POINT_LENGTH_NAME)},
                 x, count, countErr, t);
     }
 
     @Override
-    public boolean providesYError(DataPoint point) {
+    public boolean providesYError(Values point) {
         return true;
     }
 
     @Override
-    public Value getYerr(DataPoint point) throws NameNotFoundException {
+    public Value getYerr(Values point) throws NameNotFoundException {
         if (super.providesYError(point)) {
             return Value.of(super.getYerr(point).doubleValue() / getTime(point));
         } else {
@@ -94,12 +94,12 @@ public class SpectrumDataAdapter extends XYAdapter {
         }
     }
 
-    public long getCount(DataPoint point) {
+    public long getCount(Values point) {
         return super.getY(point).numberValue().longValue();
     }
 
     @Override
-    public Value getY(DataPoint point) {
+    public Value getY(Values point) {
         return Value.of(super.getY(point).doubleValue() / getTime(point));
     }
 
@@ -8,8 +8,8 @@ package inr.numass.scripts
 
 import hep.dataforge.io.ColumnedDataWriter
 import hep.dataforge.tables.ListTable
-import hep.dataforge.tables.MapPoint
 import hep.dataforge.tables.TableFormatBuilder
+import hep.dataforge.tables.ValueMap
 import inr.numass.data.NumassData
 
 NumassData.metaClass.findPoint{double u ->
@@ -28,19 +28,19 @@ Map<Double, Double> dif(NumassData data1, NumassData data2, double uset){
 
 def buildSet(NumassData data1, NumassData data2, double... points){
     TableFormatBuilder builder = new TableFormatBuilder().addNumber("channel");
-    List<MapPoint> pointList = new ArrayList<>();
+    List<ValueMap> pointList = new ArrayList<>();
 
     for(double point: points){
         builder.addNumber(Double.toString(point));
         Map<Double, Double> dif = dif(data1, data2, point);
         if(pointList.isEmpty()){
             for(Double channel : dif.keySet()){
-                MapPoint p = new MapPoint();
+                ValueMap p = new ValueMap();
                 p.putValue("channel",channel);
                 pointList.add(p);
             }
         }
-        for(MapPoint mp:pointList){
+        for(ValueMap mp:pointList){
             double channel = mp.getValue("channel").doubleValue();
             mp.putValue(Double.toString(point), dif.get(channel));
         }
 
@@ -7,7 +7,7 @@
 package inr.numass.scripts
 
 import hep.dataforge.grind.Grind
-import hep.dataforge.tables.DataPoint
+import hep.dataforge.values.Values
 import inr.numass.data.NumassPointImpl
 import inr.numass.data.RawNMPoint
 import inr.numass.storage.NumassDataLoader
@@ -63,7 +63,7 @@ PileUpSimulator buildSimulator(NumassPointImpl point, double cr, NumassPointImpl
     if (extrapolate) {
         double[] chanels = new double[RawNMPoint.MAX_CHANEL];
         double[] values = new double[RawNMPoint.MAX_CHANEL];
-        DataPoint fitResult = new UnderflowCorrection().fitPoint(point, 400, 600, 1800, 20);
+        Values fitResult = new UnderflowCorrection().fitPoint(point, 400, 600, 1800, 20);
 
         def amp = fitResult.getDouble("amp")
         def sigma = fitResult.getDouble("expConst")
@@ -74,7 +74,7 @@ PileUpSimulator buildSimulator(NumassPointImpl point, double cr, NumassPointImpl
             if (i < lowerChannel) {
                 values[i] = point.getLength()*amp * Math.exp((i as double) / sigma)
             } else {
-                values[i] = Math.max(0, point.getCount(i) - (reference == null ? 0 : reference.getCount(i)));
+                values[i] = Math.max(0, point.getCount(i) - (reference == null ? 0 : reference.getCount(i)) as int);
             }
         }
         generator.loadSpectrum(chanels, values)
@@ -88,7 +88,7 @@ PileUpSimulator buildSimulator(NumassPointImpl point, double cr, NumassPointImpl
     return new PileUpSimulator(point.length * scale, rnd, generator).withUset(point.voltage).generate();
 }
 
-double adjustCountRate(PileUpSimulator simulator, NumassPointImpl point) {
+static double adjustCountRate(PileUpSimulator simulator, NumassPointImpl point) {
     double generatedInChannel = simulator.generated().getCountInWindow(lowerChannel, upperChannel);
     double registeredInChannel = simulator.registered().getCountInWindow(lowerChannel, upperChannel);
     return (generatedInChannel / registeredInChannel) * (point.getCountInWindow(lowerChannel, upperChannel) / point.getLength());
 
@@ -5,7 +5,7 @@ import hep.dataforge.context.Global
 import hep.dataforge.grind.GrindShell
 import hep.dataforge.grind.helpers.PlotHelper
 import hep.dataforge.plots.fx.FXPlotManager
-import hep.dataforge.tables.MapPoint
+import hep.dataforge.tables.ValueMap
 import inr.numass.NumassPlugin
 import inr.numass.data.PointAnalyzer
 import inr.numass.data.RawNMPoint
@@ -36,7 +36,7 @@ shell.eval {
 
     def plotPoints = t0.collect {
         def result = PointAnalyzer.analyzePoint(point, it)
-        MapPoint.fromMap("x.value": it, "y.value": result.cr, "y.err": result.crErr);
+        ValueMap.fromMap("x.value": it, "y.value": result.cr, "y.err": result.crErr);
     }
     //def cr = t0.collect { PointAnalyzer.analyzePoint(point, it).cr }
 
@@ -10,10 +10,10 @@ import hep.dataforge.context.Context;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.meta.Laminate;
 import hep.dataforge.meta.Meta;
-import hep.dataforge.tables.DataPoint;
 import hep.dataforge.tables.ListTable;
-import hep.dataforge.tables.MapPoint;
 import hep.dataforge.tables.Table;
+import hep.dataforge.tables.ValueMap;
+import hep.dataforge.values.Values;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -28,15 +28,15 @@ public class AdjustErrorsAction extends OneToOneAction<Table, Table> {
 
     @Override
     protected Table execute(Context context, String name, Table input, Laminate meta) {
-        List<DataPoint> points = new ArrayList<>();
-        for (DataPoint dp : input) {
+        List<Values> points = new ArrayList<>();
+        for (Values dp : input) {
             points.add(evalPoint(meta, dp));
         }
 
         return new ListTable(input.getFormat(), points);
     }
 
-    private DataPoint evalPoint(Meta meta, DataPoint dp) {
+    private Values evalPoint(Meta meta, Values dp) {
         if (meta.hasMeta("point")) {
             for (Meta pointMeta : meta.getMetaList("point")) {
                 if (pointMeta.getDouble("Uset") == dp.getDouble("Uset")) {
@@ -63,8 +63,8 @@ public class AdjustErrorsAction extends OneToOneAction<Table, Table> {
         return dp;
     }
 
-    private DataPoint adjust(DataPoint dp, Meta config) {
-        MapPoint.Builder res = new MapPoint.Builder(dp);
+    private Values adjust(Values dp, Meta config) {
+        ValueMap.Builder res = new ValueMap.Builder(dp);
         if (dp.hasValue("CRerr")) {
             double instability = 0;
             if (dp.hasValue("CR")) {
 
@@ -62,9 +62,7 @@ public class DebunchAction extends OneToOneAction<RawNMFile, RawNMFile> {
                 point = report.getPoint();
             }
             return point;
-        }).forEach((point) -> {
-            res.putPoint(point);
-        });
+        }).forEach(res::putPoint);
         report(context, name, "File {} completed", source.getName());
 
         context.getChronicle(name).print(new PrintWriter(buildActionOutput(context, name)));
 
@@ -22,8 +22,8 @@ import hep.dataforge.exceptions.ContentException;
 import hep.dataforge.io.ColumnedDataWriter;
 import hep.dataforge.meta.Laminate;
 import hep.dataforge.tables.ListTable;
-import hep.dataforge.tables.MapPoint;
 import hep.dataforge.tables.Table;
+import hep.dataforge.tables.ValueMap;
 import hep.dataforge.values.Value;
 import inr.numass.data.NMFile;
 import inr.numass.data.NumassData;
@@ -125,7 +125,7 @@ public class FindBorderAction extends OneToOneAction<NumassData, Table> {
             }
         }
 
-        dataBuilder.row(new MapPoint(map));
+        dataBuilder.row(new ValueMap(map));
     }
 }
 
@@ -25,6 +25,7 @@ import hep.dataforge.io.ColumnedDataWriter;
 import hep.dataforge.meta.Laminate;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.tables.*;
+import hep.dataforge.values.Values;
 
 import java.io.OutputStream;
 import java.util.*;
@@ -90,7 +91,7 @@ public class MergeDataAction extends ManyToOneAction<Table, Table> {
 //        return builder;
 //    }
 
-    private DataPoint mergeDataPoints(DataPoint dp1, DataPoint dp2) {
+    private Values mergeDataPoints(Values dp1, Values dp2) {
         if (dp1 == null) {
             return dp2;
         }
@@ -121,7 +122,7 @@ public class MergeDataAction extends ManyToOneAction<Table, Table> {
         // absolute errors add in quadrature
         double crErr = Math.sqrt(err1 * err1 * t1 * t1 + err2 * err2 * t2 * t2) / time;
 
-        MapPoint.Builder map = new MapPoint(parnames, Uset, Uread, time, total, wind, cr, crErr).builder();
+        ValueMap.Builder map = new ValueMap(parnames, Uset, Uread, time, total, wind, cr, crErr).builder();
 
         if (dp1.names().contains("relCR") && dp2.names().contains("relCR")) {
             double relCR = (dp1.getDouble("relCR") + dp2.getDouble("relCR")) / 2;
@@ -134,12 +135,12 @@ public class MergeDataAction extends ManyToOneAction<Table, Table> {
 
     private Table mergeDataSets(String name, Collection<Table> ds) {
         // merge all points into a single data set
-        Map<Double, List<DataPoint>> points = new LinkedHashMap<>();
+        Map<Double, List<Values>> points = new LinkedHashMap<>();
         for (Table d : ds) {
             if (!d.getFormat().names().contains(parnames)) {
                 throw new IllegalArgumentException();
             }
-            for (DataPoint dp : d) {
+            for (Values dp : d) {
                 double uset = dp.getValue(parnames[0]).doubleValue();
                 if (!points.containsKey(uset)) {
                     points.put(uset, new ArrayList<>());
@@ -148,11 +149,11 @@ public class MergeDataAction extends ManyToOneAction<Table, Table> {
             }
         }
 
-        List<DataPoint> res = new ArrayList<>();
+        List<Values> res = new ArrayList<>();
 
         points.entrySet().stream().map((entry) -> {
-            DataPoint curPoint = null;
-            for (DataPoint newPoint : entry.getValue()) {
+            Values curPoint = null;
+            for (Values newPoint : entry.getValue()) {
                 curPoint = mergeDataPoints(curPoint, newPoint);
             }
             return curPoint;
 
@@ -23,8 +23,12 @@ import hep.dataforge.exceptions.ContentException;
 import hep.dataforge.io.ColumnedDataWriter;
 import hep.dataforge.meta.Laminate;
 import hep.dataforge.meta.Meta;
-import hep.dataforge.tables.*;
+import hep.dataforge.tables.ListTable;
+import hep.dataforge.tables.Table;
+import hep.dataforge.tables.TableTransform;
+import hep.dataforge.tables.ValueMap;
 import hep.dataforge.values.Value;
+import hep.dataforge.values.Values;
 import javafx.util.Pair;
 import org.apache.commons.math3.analysis.interpolation.SplineInterpolator;
 import org.apache.commons.math3.analysis.polynomials.PolynomialSplineFunction;
@@ -50,7 +54,7 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
 
     private static final String[] monitorNames = {"Timestamp", "Total", "CR", "CRerr"};
 
-    CopyOnWriteArrayList<DataPoint> monitorPoints = new CopyOnWriteArrayList<>();
+    CopyOnWriteArrayList<Values> monitorPoints = new CopyOnWriteArrayList<>();
     //FIXME remove from state
 
     @Override
@@ -58,7 +62,7 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
 
         double monitor = meta.getDouble("monitorPoint", Double.NaN);
 
-        TreeMap<Instant, DataPoint> index = getMonitorIndex(monitor, sourceData);
+        TreeMap<Instant, Values> index = getMonitorIndex(monitor, sourceData);
         if (index.isEmpty()) {
             context.getChronicle(name).reportError("No monitor points found");
             return sourceData;
@@ -67,7 +71,7 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
         double totalAv = 0;
         String head = "";
         head += String.format("%20s\t%10s\t%s%n", "Timestamp", "Total", "CR in window");
-        for (DataPoint dp : index.values()) {
+        for (Values dp : index.values()) {
             head += String.format("%20s\t%10d\t%g%n", getTime(dp).toString(), getTotal(dp), getCR(dp));
             norm += getCR(dp) / index.size();
             totalAv += getTotal(dp) / index.size();
@@ -76,10 +80,10 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
 
         head += String.format("%20s\t%10g\t%g%n", "Average", totalAv, norm);
 
-        List<DataPoint> dataList = new ArrayList<>();
+        List<Values> dataList = new ArrayList<>();
 
-        for (DataPoint dp : sourceData) {
-            MapPoint.Builder pb = new MapPoint.Builder(dp);
+        for (Values dp : sourceData) {
+            ValueMap.Builder pb = new ValueMap.Builder(dp);
             pb.putValue("Monitor", 1.0);
             if (!isMonitorPoint(monitor, dp) || index.isEmpty()) {
                 Pair<Double, Double> corr;
@@ -139,7 +143,7 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
         return data;
     }
 
-    private Pair<Double, Double> getSplineCorrection(TreeMap<Instant, DataPoint> index, DataPoint dp, double norm) {
+    private Pair<Double, Double> getSplineCorrection(TreeMap<Instant, Values> index, Values dp, double norm) {
         double time = getTime(dp).toEpochMilli();
 
         double[] xs = new double[index.size()];
@@ -147,7 +151,7 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
 
         int i = 0;
 
-        for (Entry<Instant, DataPoint> entry : index.entrySet()) {
+        for (Entry<Instant, Values> entry : index.entrySet()) {
             xs[i] = (double) entry.getKey().toEpochMilli();
             ys[i] = getCR(entry.getValue()) / norm;
             i++;
@@ -162,10 +166,10 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
         }
     }
 
-    private Pair<Double, Double> getLinearCorrection(TreeMap<Instant, DataPoint> index, DataPoint dp, double norm) {
+    private Pair<Double, Double> getLinearCorrection(TreeMap<Instant, Values> index, Values dp, double norm) {
         Instant time = getTime(dp);
-        Entry<Instant, DataPoint> previousMonitor = index.floorEntry(time);
-        Entry<Instant, DataPoint> nextMonitor = index.ceilingEntry(time);
+        Entry<Instant, Values> previousMonitor = index.floorEntry(time);
+        Entry<Instant, Values> nextMonitor = index.ceilingEntry(time);
 
         if (previousMonitor == null) {
             previousMonitor = nextMonitor;
@@ -203,25 +207,25 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
         }
     }
 
-    private boolean isMonitorPoint(double monitor, DataPoint point) {
+    private boolean isMonitorPoint(double monitor, Values point) {
         return point.getValue("Uset").doubleValue() == monitor;
     }
 
-    private Instant getTime(DataPoint point) {
+    private Instant getTime(Values point) {
         return point.getValue("Timestamp").timeValue();
     }
 
-    private int getTotal(DataPoint point) {
+    private int getTotal(Values point) {
         return point.getValue("Total").intValue();
     }
 
-    private double getCR(DataPoint point) {
+    private double getCR(Values point) {
         return point.getValue("CR").doubleValue();
     }
 
-    private TreeMap<Instant, DataPoint> getMonitorIndex(double monitor, Iterable<DataPoint> data) {
-        TreeMap<Instant, DataPoint> res = new TreeMap<>();
-        for (DataPoint dp : data) {
+    private TreeMap<Instant, Values> getMonitorIndex(double monitor, Iterable<Values> data) {
+        TreeMap<Instant, Values> res = new TreeMap<>();
+        for (Values dp : data) {
             if (isMonitorPoint(monitor, dp)) {
                 res.put(getTime(dp), dp);
             }
 
@@ -25,7 +25,11 @@ import hep.dataforge.io.ColumnedDataWriter;
 import hep.dataforge.io.XMLMetaWriter;
 import hep.dataforge.meta.Laminate;
 import hep.dataforge.meta.Meta;
-import hep.dataforge.tables.*;
+import hep.dataforge.tables.ListTable;
+import hep.dataforge.tables.Table;
+import hep.dataforge.tables.TableFormat;
+import hep.dataforge.tables.ValueMap;
+import hep.dataforge.values.Values;
 import inr.numass.data.NumassData;
 import inr.numass.data.NumassPoint;
 import inr.numass.data.PointBuilders;
@@ -113,7 +117,7 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
             }
         }
 
-        List<DataPoint> dataList = new ArrayList<>();
+        List<Values> dataList = new ArrayList<>();
         for (NumassPoint point : dataFile) {
 
             long total = point.getTotalCount();
@@ -145,7 +149,7 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
 
             Instant timestamp = point.getStartTime();
 
-            dataList.add(new MapPoint(parnames, new Object[]{uset, uread, time, total, wind, correctionFactor, cr, crErr, timestamp}));
+            dataList.add(new ValueMap(parnames, new Object[]{uset, uread, time, total, wind, correctionFactor, cr, crErr, timestamp}));
         }
 
         TableFormat format;
 
@@ -18,6 +18,7 @@ import hep.dataforge.plots.data.PlottableData;
 import hep.dataforge.plots.data.XYPlottable;
 import hep.dataforge.tables.*;
 import hep.dataforge.values.ValueType;
+import hep.dataforge.values.Values;
 import inr.numass.data.NumassData;
 import inr.numass.data.NumassPoint;
 
@@ -68,7 +69,7 @@ public class ShowEnergySpectrumAction extends OneToOneAction<NumassData, Table>
 
         ListTable.Builder builder = new ListTable.Builder(formatBuilder.build());
         rows.stream().forEachOrdered((Double channel) -> {
-            MapPoint.Builder mb = new MapPoint.Builder();
+            ValueMap.Builder mb = new ValueMap.Builder();
             mb.putValue("channel", channel);
             valueMap.entrySet().forEach((Map.Entry<String, Map<Double, Double>> entry) -> {
                 mb.putValue(entry.getKey(), entry.getValue().get(channel));
@@ -106,8 +107,8 @@ public class ShowEnergySpectrumAction extends OneToOneAction<NumassData, Table>
             String seriesName = String.format("%d: %s", index, entry.getKey());
 
             String[] nameList = {XYAdapter.X_VALUE_KEY, XYAdapter.Y_VALUE_KEY};
-            List<DataPoint> data = entry.getValue().entrySet().stream()
-                    .map(e -> new MapPoint(nameList, e.getKey(), e.getValue()))
+            List<Values> data = entry.getValue().entrySet().stream()
+                    .map(e -> new ValueMap(nameList, e.getKey(), e.getValue()))
                     .collect(Collectors.toList());
             PlottableData datum = PlottableData.plot(seriesName, XYAdapter.DEFAULT_ADAPTER, data);
             datum.configure(plottableConfig);
 
@@ -11,10 +11,10 @@ import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.io.ColumnedDataReader;
 import hep.dataforge.io.ColumnedDataWriter;
 import hep.dataforge.meta.Laminate;
-import hep.dataforge.tables.DataPoint;
 import hep.dataforge.tables.ListTable;
-import hep.dataforge.tables.MapPoint;
 import hep.dataforge.tables.Table;
+import hep.dataforge.tables.ValueMap;
+import hep.dataforge.values.Values;
 
 import java.io.File;
 import java.io.IOException;
@@ -35,8 +35,8 @@ public class SubstractSpectrumAction extends OneToOneAction<Table, Table> {
             Table referenceTable = new ColumnedDataReader(referenceFile).toTable();
             ListTable.Builder builder = new ListTable.Builder(input.getFormat());
             input.stream().forEach(point -> {
-                MapPoint.Builder pointBuilder = new MapPoint.Builder(point);
-                Optional<DataPoint> referencePoint = referenceTable.stream()
+                ValueMap.Builder pointBuilder = new ValueMap.Builder(point);
+                Optional<Values> referencePoint = referenceTable.stream()
                         .filter(p -> Math.abs(p.getDouble("Uset") - point.getDouble("Uset")) < 0.1).findFirst();
                 if (referencePoint.isPresent()) {
                     pointBuilder.putValue("CR", Math.max(0, point.getDouble("CR") - referencePoint.get().getDouble("CR")));
 
@@ -25,8 +25,12 @@ import hep.dataforge.io.ColumnedDataWriter;
 import hep.dataforge.meta.Laminate;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.stat.fit.FitState;
-import hep.dataforge.tables.*;
+import hep.dataforge.tables.ListTable;
+import hep.dataforge.tables.Table;
+import hep.dataforge.tables.TableFormat;
+import hep.dataforge.tables.ValueMap;
 import hep.dataforge.values.Value;
+import hep.dataforge.values.Values;
 
 import java.io.OutputStream;
 import java.util.Arrays;
@@ -92,7 +96,7 @@ public class SummaryAction extends ManyToOneAction<FitState, Table> {
                 weights[i] += weight;
             }
             values[values.length - 1] = Value.of(state.getChi2());
-            DataPoint point = new MapPoint(names, values);
+            Values point = new ValueMap(names, values);
             res.row(point);
         });
 
@@ -105,7 +109,7 @@ public class SummaryAction extends ManyToOneAction<FitState, Table> {
             averageValues[2 * i + 2] = Value.of(1 / Math.sqrt(weights[i]));
         }
 
-        res.row(new MapPoint(names, averageValues));
+        res.row(new ValueMap(names, averageValues));
 
         return res.build();
     }
 
@@ -18,9 +18,9 @@ package inr.numass.data;
 import hep.dataforge.stat.fit.ParamSet;
 import hep.dataforge.stat.models.Generator;
 import hep.dataforge.stat.models.XYModel;
-import hep.dataforge.tables.DataPoint;
 import hep.dataforge.tables.ListTable;
 import hep.dataforge.tables.Table;
+import hep.dataforge.values.Values;
 import org.apache.commons.math3.random.JDKRandomGenerator;
 import org.apache.commons.math3.random.RandomDataGenerator;
 import org.apache.commons.math3.random.RandomGenerator;
@@ -66,9 +66,9 @@ public class SpectrumGenerator implements Generator {
     }
 
     @Override
-    public Table generateData(Iterable<DataPoint> config) {
+    public Table generateData(Iterable<Values> config) {
         ListTable.Builder res = new ListTable.Builder(adapter.getFormat());
-        for (Iterator<DataPoint> it = config.iterator(); it.hasNext();) {
+        for (Iterator<Values> it = config.iterator(); it.hasNext();) {
             res.row(this.generateDataPoint(it.next()));
         }
         return res.build();
@@ -81,21 +81,21 @@ public class SpectrumGenerator implements Generator {
      * @param config
      * @return
      */
-    public Table generateExactData(Iterable<DataPoint> config) {
+    public Table generateExactData(Iterable<Values> config) {
         ListTable.Builder res = new ListTable.Builder(adapter.getFormat());
-        for (Iterator<DataPoint> it = config.iterator(); it.hasNext();) {
+        for (Iterator<Values> it = config.iterator(); it.hasNext();) {
             res.row(this.generateExactDataPoint(it.next()));
         }
         return res.build();
     }
 
-    public DataPoint generateExactDataPoint(DataPoint configPoint) {
+    public Values generateExactDataPoint(Values configPoint) {
         double mu = this.getMu(configPoint);
         return adapter.buildSpectrumDataPoint(this.getX(configPoint), (long) mu, this.getTime(configPoint));
     }
 
     @Override
-    public DataPoint generateDataPoint(DataPoint configPoint) {
+    public Values generateDataPoint(Values configPoint) {
         double mu = this.getMu(configPoint);
         if (isNaN(mu) || (mu < 0)) {
             throw new IllegalStateException("Negative input parameter for generator.");
@@ -138,7 +138,7 @@ public class SpectrumGenerator implements Generator {
         return this.genType.name();
     }
 
-    private double getMu(DataPoint point) {
+    private double getMu(Values point) {
         return source.value(this.getX(point), params) * this.getTime(point);
     }
 
@@ -148,7 +148,7 @@ public class SpectrumGenerator implements Generator {
 //    }
 //    return sqrt(this.getMu(point));
 //    }
-    private double getTime(DataPoint point) {
+    private double getTime(Values point) {
 
         return adapter.getTime(point);
 //        if (point.containsName("time")) {
@@ -172,7 +172,7 @@ public class SpectrumGenerator implements Generator {
         this.adapter = adapter;
     }
 
-    private double getX(DataPoint point) {
+    private double getX(Values point) {
         return adapter.getX(point).doubleValue();
     }
 
@@ -17,9 +17,8 @@ package inr.numass.data;

 import hep.dataforge.maths.NamedMatrix;
 import hep.dataforge.stat.parametric.ParametricFunction;
-import hep.dataforge.tables.DataPoint;
 import hep.dataforge.tables.ListTable;
-import hep.dataforge.values.NamedValueSet;
+import hep.dataforge.values.Values;
 import org.apache.commons.math3.analysis.UnivariateFunction;
 import org.apache.commons.math3.linear.Array2DRowRealMatrix;
 import org.apache.commons.math3.linear.RealMatrix;
@@ -38,7 +37,7 @@ public class SpectrumInformation {
         this.source = source;
     }

-    public NamedMatrix getExpetedCovariance(NamedValueSet set, ListTable data, String... parNames) {
+    public NamedMatrix getExpetedCovariance(Values set, ListTable data, String... parNames) {
         String[] names = parNames;
         if (names.length == 0) {
             names = source.namesAsArray();
@@ -56,7 +55,7 @@ public class SpectrumInformation {
     * @param parNames
     * @return
     */
-    public NamedMatrix getInformationMatrix(NamedValueSet set, ListTable data, String... parNames) {
+    public NamedMatrix getInformationMatrix(Values set, ListTable data, String... parNames) {
         SpectrumDataAdapter reader = NumassDataUtils.adapter();

         String[] names = parNames;
@@ -67,7 +66,7 @@ public class SpectrumInformation {
         assert source.names().contains(names);
         RealMatrix res = new Array2DRowRealMatrix(names.length, names.length);

-        for (DataPoint dp : data) {
+        for (Values dp : data) {
             /*PENDING There is a global inefficiency here: two derivatives
              * are recomputed on every call.
              * The whole matrix should be computed at once for each point; that would cut the number
@@ -80,11 +79,11 @@ public class SpectrumInformation {
     }

     // the formula is correct!
-    public double getPoinSignificance(NamedValueSet set, String name1, String name2, double x) {
+    public double getPoinSignificance(Values set, String name1, String name2, double x) {
         return source.derivValue(name1, x, set) * source.derivValue(name2, x, set) / source.value(x, set);
     }

-    public NamedMatrix getPointInfoMatrix(NamedValueSet set, double x, double t, String... parNames) {
+    public NamedMatrix getPointInfoMatrix(Values set, double x, double t, String... parNames) {
         assert source.names().contains(set.namesAsArray());

         String[] names = parNames;
@@ -119,7 +118,7 @@ public class SpectrumInformation {
     * @param name2
     * @return
     */
-    public UnivariateFunction getSignificanceFunction(final NamedValueSet set, final String name1, final String name2) {
+    public UnivariateFunction getSignificanceFunction(final Values set, final String name1, final String name2) {
         return (double d) -> getPoinSignificance(set, name1, name2, d);
     }
 }
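In formula form, getPoinSignificance evaluates the per-point Fisher information density for a Poisson-distributed spectrum S(x, θ):

    I_{ij}(x) = \frac{1}{S(x,\theta)} \, \frac{\partial S(x,\theta)}{\partial \theta_i} \, \frac{\partial S(x,\theta)}{\partial \theta_j}

which is exactly derivValue(name1) · derivValue(name2) / value as computed above; getInformationMatrix accumulates this over the data points, presumably weighted by each point's exposure time t.
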
@@ -17,8 +17,8 @@ package inr.numass.models;

 import hep.dataforge.exceptions.NotDefinedException;
 import hep.dataforge.stat.parametric.AbstractParametricFunction;
-import hep.dataforge.values.NamedValueSet;
 import hep.dataforge.values.ValueProvider;
+import hep.dataforge.values.Values;

 import java.io.File;
 import java.io.FileInputStream;
@@ -130,7 +130,7 @@ public class BetaSpectrum extends AbstractParametricFunction implements RangedNamedSetSpectrum {
     }

     @Override
-    public double derivValue(String name, double E, NamedValueSet pars) throws NotDefinedException {
+    public double derivValue(String name, double E, Values pars) throws NotDefinedException {
         if (this.fss == null) {
             return this.derivRootsterile(name, E, pars);
         }
@@ -147,12 +147,12 @@ public class BetaSpectrum extends AbstractParametricFunction implements RangedNamedSetSpectrum {
     }

     @Override
-    public Double max(NamedValueSet set) {
+    public Double max(Values set) {
         return set.getDouble("E0");
     }

     @Override
-    public Double min(NamedValueSet set) {
+    public Double min(Values set) {
         return 0d;
     }

@@ -216,7 +216,7 @@ public class BetaSpectrum extends AbstractParametricFunction implements RangedNamedSetSpectrum {
     }

     @Override
-    public double value(double E, NamedValueSet pars) {
+    public double value(double E, Values pars) {
         if (this.fss == null) {
             return rootsterile(E, pars);
         }

@@ -6,7 +6,7 @@
 package inr.numass.models;

 import hep.dataforge.stat.parametric.ParametricFunction;
-import hep.dataforge.values.NamedValueSet;
+import hep.dataforge.values.Values;
 import inr.numass.utils.NumassIntegrator;
 import inr.numass.utils.TritiumUtils;
 import org.apache.commons.math3.analysis.UnivariateFunction;
@@ -38,7 +38,7 @@ public class CustomNBkgSpectrum extends NBkgSpectrum {
     }

     @Override
-    public double value(double x, NamedValueSet set) {
+    public double value(double x, Values set) {
         if (customBackgroundFunction == null) {
             return super.value(x, set);
         } else {

@@ -17,14 +17,15 @@ package inr.numass.models;

 import hep.dataforge.exceptions.NamingException;
 import hep.dataforge.exceptions.NotDefinedException;
-import hep.dataforge.stat.parametric.AbstractParametricFunction;
 import hep.dataforge.maths.integration.GaussRuleIntegrator;
 import hep.dataforge.maths.integration.UnivariateIntegrator;
-import hep.dataforge.values.NamedValueSet;
-import java.util.List;
+import hep.dataforge.stat.parametric.AbstractParametricFunction;
+import hep.dataforge.values.Values;
 import org.apache.commons.math3.analysis.BivariateFunction;
 import org.apache.commons.math3.analysis.UnivariateFunction;

+import java.util.List;

 /**
  *
  * @author Darksnake
@@ -45,12 +46,12 @@ public class EmpiricalLossSpectrum extends AbstractParametricFunction {
     }

     @Override
-    public double derivValue(String parName, double x, NamedValueSet set) {
+    public double derivValue(String parName, double x, Values set) {
         throw new NotDefinedException();
     }

     @Override
-    public double value(double U, NamedValueSet set) {
+    public double value(double U, Values set) {
         if (U >= eMax) {
             return 0;
         }

@@ -18,8 +18,8 @@ package inr.numass.models;
 import hep.dataforge.exceptions.NotDefinedException;
 import hep.dataforge.stat.parametric.AbstractParametricFunction;
 import hep.dataforge.stat.parametric.ParametricFunction;
-import hep.dataforge.values.NamedValueSet;
 import hep.dataforge.values.ValueProvider;
+import hep.dataforge.values.Values;
 import org.apache.commons.math3.analysis.BivariateFunction;
 import org.apache.commons.math3.analysis.UnivariateFunction;

@@ -38,7 +38,7 @@ public class ExperimentalVariableLossSpectrum extends VariableLossSpectrum {
         return new ExperimentalVariableLossSpectrum(new AbstractParametricFunction(new String[0]) {

             @Override
-            public double derivValue(String parName, double x, NamedValueSet set) {
+            public double derivValue(String parName, double x, Values set) {
                 throw new NotDefinedException();
             }

@@ -48,7 +48,7 @@ public class ExperimentalVariableLossSpectrum extends VariableLossSpectrum {
             }

             @Override
-            public double value(double x, NamedValueSet set) {
+            public double value(double x, Values set) {
                 return transmission.value(x);
             }
         }, eMax,smootherW);

@@ -16,8 +16,8 @@
 package inr.numass.models;

 import hep.dataforge.io.IOUtils;
-import hep.dataforge.tables.DataPoint;
 import hep.dataforge.tables.PointSource;
+import hep.dataforge.values.Values;

 import java.io.InputStream;
 import java.util.ArrayList;
@@ -33,7 +33,7 @@ public class FSS {
     public FSS(InputStream stream) {
         PointSource data = IOUtils.readColumnedData(stream, "E", "P");
         norm = 0;
-        for (DataPoint dp : data) {
+        for (Values dp : data) {
             es.add(dp.getDouble("E"));
             double p = dp.getDouble("P");
             ps.add(p);
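The FSS loop is the archetype of the row-iteration migration: PointSource still yields rows, only their static type changes. A sketch of the pattern in isolation; the norm accumulation is an assumption, since the constructor body is cut off above:

    import hep.dataforge.io.IOUtils;
    import hep.dataforge.tables.PointSource;
    import hep.dataforge.values.Values;
    import java.io.InputStream;

    static double readAndNormalize(InputStream stream) {
        PointSource data = IOUtils.readColumnedData(stream, "E", "P");
        double norm = 0;
        for (Values dp : data) {          // formerly: for (DataPoint dp : data)
            double p = dp.getDouble("P"); // same typed accessor as before
            norm += p;                    // assumed: the truncated body sums P into norm
        }
        return norm;
    }
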
@@ -19,8 +19,8 @@ import hep.dataforge.context.Global;
 import hep.dataforge.exceptions.NameNotFoundException;
 import hep.dataforge.stat.parametric.AbstractParametricFunction;
 import hep.dataforge.stat.parametric.ParametricFunction;
-import hep.dataforge.values.NamedValueSet;
 import hep.dataforge.values.ValueProvider;
+import hep.dataforge.values.Values;
 import org.apache.commons.math3.analysis.UnivariateFunction;
 import org.apache.commons.math3.analysis.integration.SimpsonIntegrator;
 import org.apache.commons.math3.analysis.integration.UnivariateIntegrator;
@@ -51,7 +51,7 @@ public class GaussResolution extends AbstractParametricFunction implements Transmission {
     }

     @Override
-    public double derivValue(String name, double X, NamedValueSet pars) {
+    public double derivValue(String name, double X, Values pars) {
         if (abs(X - getPos(pars)) > cutoff * getW(pars)) {
             return 0;
         }
@@ -71,7 +71,7 @@ public class GaussResolution extends AbstractParametricFunction implements Transmission {
             int maxEval = Global.instance().getInt("INTEGR_POINTS", 500);

             @Override
-            public double derivValue(String parName, double x, NamedValueSet set) {
+            public double derivValue(String parName, double x, Values set) {
                 double a = getLowerBound(set);
                 double b = getUpperBound(set);
                 assert b > a;
@@ -87,7 +87,7 @@ public class GaussResolution extends AbstractParametricFunction implements Transmission {
             }

             @Override
-            public double value(double x, NamedValueSet set) {
+            public double value(double x, Values set) {
                 double a = getLowerBound(set);
                 double b = getUpperBound(set);
                 assert b > a;
@@ -97,11 +97,11 @@ public class GaussResolution extends AbstractParametricFunction implements Transmission {
     }

     @Override
-    public double getDeriv(String name, NamedValueSet set, double input, double output) {
+    public double getDeriv(String name, Values set, double input, double output) {
         return this.derivValue(name, output - input, set);
     }

-    private UnivariateFunction getDerivProduct(final String name, final ParametricFunction bare, final NamedValueSet pars, final double x0) {
+    private UnivariateFunction getDerivProduct(final String name, final ParametricFunction bare, final Values pars, final double x0) {
         return (double x) -> {
             double res1;
             double res2;
@@ -119,7 +119,7 @@ public class GaussResolution extends AbstractParametricFunction implements Transmission {
         };
     }

-    private double getLowerBound(final NamedValueSet pars) {
+    private double getLowerBound(final Values pars) {
         return getPos(pars) - cutoff * getW(pars);
     }

@@ -129,7 +129,7 @@ public class GaussResolution extends AbstractParametricFunction implements Transmission {
         return 0;
     }

-    private UnivariateFunction getProduct(final ParametricFunction bare, final NamedValueSet pars, final double x0) {
+    private UnivariateFunction getProduct(final ParametricFunction bare, final Values pars, final double x0) {
         return (double x) -> {
             double res = bare.value(x0 - x, pars) * GaussResolution.this.value(x, pars);
             assert !isNaN(res);
@@ -137,12 +137,12 @@ public class GaussResolution extends AbstractParametricFunction implements Transmission {
         };
     }

-    private double getUpperBound(final NamedValueSet pars) {
+    private double getUpperBound(final Values pars) {
         return getPos(pars) + cutoff * getW(pars);
     }

     @Override
-    public double getValue(NamedValueSet set, double input, double output) {
+    public double getValue(Values set, double input, double output) {
         return this.value(output - input, set);
     }

@@ -156,7 +156,7 @@ public class GaussResolution extends AbstractParametricFunction implements Transmission {
     }

     @Override
-    public double value(double x, NamedValueSet pars) {
+    public double value(double x, Values pars) {
         if (abs(x - getPos(pars)) > cutoff * getW(pars)) {
             return 0;
         }
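The product functions above are the integrand of a plain convolution of the bare spectrum with the Gaussian resolution, truncated at cutoff·w around the peak position:

    (f \ast R)(x_0) = \int_{pos - cutoff \cdot w}^{pos + cutoff \cdot w} f(x_0 - x)\, R(x)\, dx

with getLowerBound and getUpperBound supplying the two integration limits.
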
@@ -17,8 +17,9 @@ package inr.numass.models;

 import hep.dataforge.exceptions.NotDefinedException;
 import hep.dataforge.stat.parametric.AbstractParametricFunction;
-import hep.dataforge.values.NamedValueSet;
 import hep.dataforge.values.ValueProvider;
+import hep.dataforge.values.Values;

 import static java.lang.Math.exp;
 import static java.lang.Math.sqrt;

@@ -36,7 +37,7 @@ public class GaussSourceSpectrum extends AbstractParametricFunction implements RangedNamedSetSpectrum {
     }

     @Override
-    public double derivValue(String parName, double E, NamedValueSet set) {
+    public double derivValue(String parName, double E, Values set) {
         switch (parName) {
             case "pos":
                 return getGaussPosDeriv(E, getPos(set), getSigma(set));
@@ -61,12 +62,12 @@ public class GaussSourceSpectrum extends AbstractParametricFunction implements RangedNamedSetSpectrum {
     }

     @Override
-    public Double max(NamedValueSet set) {
+    public Double max(Values set) {
         return getPos(set) + cutoff * getSigma(set);
     }

     @Override
-    public Double min(NamedValueSet set) {
+    public Double min(Values set) {
         return getPos(set) - cutoff * getSigma(set);
     }

@@ -84,7 +85,7 @@ public class GaussSourceSpectrum extends AbstractParametricFunction implements RangedNamedSetSpectrum {
     }

     @Override
-    public double value(final double E, NamedValueSet set) {
+    public double value(final double E, Values set) {
         return getGauss(E, getPos(set), getSigma(set));
     }
 }

@@ -18,7 +18,7 @@ package inr.numass.models;
 import hep.dataforge.exceptions.NotDefinedException;
 import hep.dataforge.maths.integration.UnivariateIntegrator;
 import hep.dataforge.stat.parametric.AbstractParametricFunction;
-import hep.dataforge.values.NamedValueSet;
+import hep.dataforge.values.Values;
 import inr.numass.utils.NumassIntegrator;
 import org.apache.commons.math3.analysis.UnivariateFunction;

@@ -40,7 +40,7 @@ public class GunSpectrum extends AbstractParametricFunction {
     }

     @Override
-    public double derivValue(String parName, final double U, NamedValueSet set) {
+    public double derivValue(String parName, final double U, Values set) {
         final double pos = set.getDouble("pos");
         final double sigma = set.getDouble("sigma");
         final double resA = set.getDouble("resA");
@@ -132,7 +132,7 @@ public class GunSpectrum extends AbstractParametricFunction {
     }

     @Override
-    public double value(final double U, NamedValueSet set) {
+    public double value(final double U, Values set) {
         final double pos = set.getDouble("pos");
         final double sigma = set.getDouble("sigma");
         final double resA = set.getDouble("resA");

@@ -17,13 +17,12 @@ package inr.numass.models;

 import hep.dataforge.exceptions.NotDefinedException;
 import hep.dataforge.names.Names;
-import hep.dataforge.values.NamedValueSet;
 import hep.dataforge.values.ValueProvider;
-import static java.lang.Math.abs;
-import static java.lang.Math.exp;
-import static java.lang.Math.sqrt;
+import hep.dataforge.values.Values;
 import org.apache.commons.math3.analysis.UnivariateFunction;

+import static java.lang.Math.*;

 public class GunTailSpectrum implements RangedNamedSetSpectrum {

     private final double cutoff = 4d;
@@ -31,17 +30,17 @@ public class GunTailSpectrum implements RangedNamedSetSpectrum {
     private final String[] list = {"pos", "tailShift", "tailAmp", "sigma"};

     @Override
-    public double derivValue(String parName, double x, NamedValueSet set) {
+    public double derivValue(String parName, double x, Values set) {
         throw new NotDefinedException();
     }

     @Override
-    public Double max(NamedValueSet set) {
+    public Double max(Values set) {
         return set.getDouble("pos") + cutoff * set.getDouble("sigma");
     }

     @Override
-    public Double min(NamedValueSet set) {
+    public Double min(Values set) {
         return 0d;
     }

@@ -56,7 +55,7 @@ public class GunTailSpectrum implements RangedNamedSetSpectrum {
     }

     @Override
-    public double value(double E, NamedValueSet set) {
+    public double value(double E, Values set) {
         double pos = set.getDouble("pos");
         double amp = set.getDouble("tailAmp");
         double sigma = set.getDouble("sigma");

@@ -21,7 +21,7 @@ import hep.dataforge.maths.integration.UnivariateIntegrator;
 import hep.dataforge.plots.PlotFrame;
 import hep.dataforge.plots.data.PlottableXYFunction;
 import hep.dataforge.utils.Misc;
-import hep.dataforge.values.NamedValueSet;
+import hep.dataforge.values.Values;
 import org.apache.commons.math3.analysis.BivariateFunction;
 import org.apache.commons.math3.analysis.UnivariateFunction;
 import org.apache.commons.math3.exception.OutOfRangeException;
@@ -125,7 +125,7 @@ public class LossCalculator {
         return (e) -> func.value(e) / norm;
     }

-    public static UnivariateFunction getSingleScatterFunction(NamedValueSet set) {
+    public static UnivariateFunction getSingleScatterFunction(Values set) {

         final double exPos = set.getDouble("exPos");
         final double ionPos = set.getDouble("ionPos");
@@ -156,7 +156,7 @@ public class LossCalculator {
         return instance;
     }

-    public static void plotScatter(PlotFrame frame, NamedValueSet set) {
+    public static void plotScatter(PlotFrame frame, Values set) {
         //"X", "shift", "exPos", "ionPos", "exW", "ionW", "exIonRatio"

         // JFreeChartFrame frame = JFreeChartFrame.drawFrame("Differential scattering crosssection", null);
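The lambda (e) -> func.value(e) / norm returned above rescales a loss function to unit integral, so the scatter functions behave as probability densities:

    \hat f(e) = \frac{f(e)}{\int f(e')\, de'}
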
@@ -15,16 +15,17 @@
  */
 package inr.numass.models;

+import hep.dataforge.names.NamedUtils;
 import hep.dataforge.stat.parametric.AbstractParametricFunction;
 import hep.dataforge.stat.parametric.ParametricFunction;
-import hep.dataforge.names.NamedUtils;
-import hep.dataforge.values.NamedValueSet;
 import hep.dataforge.values.ValueProvider;
-import java.util.ArrayList;
-import java.util.List;
+import hep.dataforge.values.Values;
 import org.apache.commons.math3.analysis.BivariateFunction;
 import org.slf4j.LoggerFactory;

+import java.util.ArrayList;
+import java.util.List;

 /**
  * Modular spectrum for any source spectrum with separate calculation for
  * different transmission components
@@ -143,7 +144,7 @@ public class ModularSpectrum extends AbstractParametricFunction {
     }

     @Override
-    public double derivValue(String parName, double U, NamedValueSet set) {
+    public double derivValue(String parName, double U, Values set) {
         if (U >= sourceSpectrum.max(set)) {
             return 0;
         }
@@ -228,7 +229,7 @@ public class ModularSpectrum extends AbstractParametricFunction {
     }

     @Override
-    public double value(double U, NamedValueSet set) {
+    public double value(double U, Values set) {
         if (U >= sourceSpectrum.max(set)) {
             return 0;
         }

@@ -17,10 +17,11 @@ package inr.numass.models;

 import hep.dataforge.stat.parametric.AbstractParametricFunction;
 import hep.dataforge.stat.parametric.ParametricFunction;
-import static hep.dataforge.names.NamedUtils.combineNamesWithEquals;
 import hep.dataforge.utils.MultiCounter;
-import hep.dataforge.values.NamedValueSet;
 import hep.dataforge.values.ValueProvider;
+import hep.dataforge.values.Values;

+import static hep.dataforge.names.NamedUtils.combineNamesWithEquals;

 /**
  *
@@ -39,7 +40,7 @@ public class NBkgSpectrum extends AbstractParametricFunction {
     }

     @Override
-    public double derivValue(String parName, double x, NamedValueSet set) {
+    public double derivValue(String parName, double x, Values set) {
         this.counter.increase(parName);
         switch (parName) {
             case "N":
@@ -72,7 +73,7 @@ public class NBkgSpectrum extends AbstractParametricFunction {
     }

     @Override
-    public double value(double x, NamedValueSet set) {
+    public double value(double x, Values set) {
         this.counter.increase("value");
         return getN(set) * source.value(x, set) + getBkg(set);
     }
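The model behind NBkgSpectrum is the usual amplitude-plus-flat-background form, read straight off the body of value:

    S(x) = N \cdot \sigma(x) + b

so the "N" branch of derivValue presumably reduces to the bare source value σ(x), and the background derivative to a constant.
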
@@ -20,7 +20,7 @@ import hep.dataforge.maths.NamedVector;
 import hep.dataforge.names.AbstractNamedSet;
 import hep.dataforge.stat.parametric.AbstractParametricFunction;
 import hep.dataforge.stat.parametric.ParametricFunction;
-import hep.dataforge.values.NamedValueSet;
+import hep.dataforge.values.Values;
 import org.apache.commons.math3.analysis.UnivariateFunction;
 import org.apache.commons.math3.analysis.interpolation.SplineInterpolator;
 import org.slf4j.LoggerFactory;
@@ -64,7 +64,7 @@ public class NamedSpectrumCaching extends AbstractParametricFunction {
     }

     @Override
-    public double derivValue(String parName, double x, NamedValueSet set) {
+    public double derivValue(String parName, double x, Values set) {
         if (!isCachingEnabled()) {
             return source.derivValue(parName, x, set);
         }
@@ -109,7 +109,7 @@ public class NamedSpectrumCaching extends AbstractParametricFunction {
         return source.providesDeriv(name);
     }

-    protected boolean sameSet(NamedValueSet set1, NamedValueSet set2) {
+    protected boolean sameSet(Values set1, Values set2) {
         for (String name : this.names()) {
             if (!Objects.equals(set1.getDouble(name), set2.getDouble(name))) {
                 return false;
@@ -135,7 +135,7 @@ public class NamedSpectrumCaching extends AbstractParametricFunction {
     /*
     * The transformation is assumed to be the same for the spectrum and for its derivatives.
     */
-    protected double transformation(CacheElement cache, NamedValueSet newSet, double x) throws TransformationNotAvailable {
+    protected double transformation(CacheElement cache, Values newSet, double x) throws TransformationNotAvailable {


     /*
@@ -147,7 +147,7 @@ public class NamedSpectrumCaching extends AbstractParametricFunction {
     }

     @Override
-    public double value(double x, NamedValueSet set) {
+    public double value(double x, Values set) {
         if (!isCachingEnabled()) {
             return source.value(x, set);
         }
@@ -184,10 +184,10 @@ public class NamedSpectrumCaching extends AbstractParametricFunction {
     protected class CacheElement extends AbstractNamedSet implements UnivariateFunction {

         private UnivariateFunction cachedSpectrum;
-        private final NamedValueSet cachedParameters;
+        private final Values cachedParameters;
         String parName;

-        CacheElement(NamedValueSet parameters, String parName) {
+        CacheElement(Values parameters, String parName) {
             super(source);
             //trim the parameter set to what is actually needed, just in case
             String[] names = source.namesAsArray();
@@ -196,7 +196,7 @@ public class NamedSpectrumCaching extends AbstractParametricFunction {
             generate(func);
         }

-        CacheElement(NamedValueSet parameters) {
+        CacheElement(Values parameters) {
             super(source);
             String[] names = source.namesAsArray();
             this.cachedParameters = new NamedVector(names, MathUtils.getDoubleArray(parameters));
@@ -224,7 +224,7 @@ public class NamedSpectrumCaching extends AbstractParametricFunction {
             return this.cachedSpectrum.value(x);
         }

-        public NamedValueSet getCachedParameters() {
+        public Values getCachedParameters() {
             return this.cachedParameters;
         }
     }
 }

@@ -15,7 +15,7 @@
  */
 package inr.numass.models;

-import hep.dataforge.values.NamedValueSet;
+import hep.dataforge.values.Values;

 /**
  *
@@ -36,12 +36,12 @@ public class SimpleRange implements SpectrumRange{


     @Override
-    public Double max(NamedValueSet set) {
+    public Double max(Values set) {
         return max;
     }

     @Override
-    public Double min(NamedValueSet set) {
+    public Double min(Values set) {
         return min;
     }

@@ -15,7 +15,7 @@
  */
 package inr.numass.models;

-import hep.dataforge.values.NamedValueSet;
+import hep.dataforge.values.Values;


 /**
@@ -23,6 +23,6 @@ import hep.dataforge.values.NamedValueSet;
  * @author Darksnake
  */
 public interface SpectrumRange {
-    Double min(NamedValueSet set);
-    Double max(NamedValueSet set);
+    Double min(Values set);
+    Double max(Values set);
 }

@@ -16,10 +16,9 @@
 package inr.numass.models;


-import hep.dataforge.stat.parametric.ParametricFunction;
 import hep.dataforge.names.NameSetContainer;
-import hep.dataforge.values.NamedValueSet;
-import hep.dataforge.values.ValueProvider;
+import hep.dataforge.stat.parametric.ParametricFunction;
+import hep.dataforge.values.Values;
 import org.apache.commons.math3.analysis.BivariateFunction;

 /**
@@ -28,18 +27,18 @@ import org.apache.commons.math3.analysis.BivariateFunction;
 */
 public interface Transmission extends NameSetContainer{

-    double getValue(NamedValueSet set, double input, double output);
-    double getDeriv(String name, NamedValueSet set, double input, double output);
+    double getValue(Values set, double input, double output);
+    double getDeriv(String name, Values set, double input, double output);
     boolean providesDeriv(String name);

     ParametricFunction getConvolutedSpectrum(RangedNamedSetSpectrum bare);


-    default BivariateFunction getBivariateFunction(final NamedValueSet params){
+    default BivariateFunction getBivariateFunction(final Values params){
         return (double input, double output) -> getValue(params, input, output);
     }

-    default BivariateFunction getBivariateDerivFunction(final String name, final NamedValueSet params){
+    default BivariateFunction getBivariateDerivFunction(final String name, final Values params){
         return (double input, double output) -> getDeriv(name, params, input, output);
     }
 }
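The two default methods let Apache Commons Math code consume a parameterized transmission as a plain BivariateFunction. A usage sketch; trans and params stand in for any Transmission implementation and its parameter set, the energies are illustrative, and "trap" is a parameter name borrowed from NumassTransmission below:

    import hep.dataforge.values.Values;
    import org.apache.commons.math3.analysis.BivariateFunction;

    // Freeze the parameters once; afterwards the transmission is a pure function.
    BivariateFunction t = trans.getBivariateFunction(params);
    double prob = t.value(18500d, 18400d); // input energy -> output energy

    BivariateFunction dt = trans.getBivariateDerivFunction("trap", params);
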
@@ -17,7 +17,7 @@ package inr.numass.models;

 import hep.dataforge.stat.parametric.AbstractParametricFunction;
 import hep.dataforge.stat.parametric.ParametricFunction;
-import hep.dataforge.values.NamedValueSet;
+import hep.dataforge.values.Values;
 import inr.numass.utils.NumassIntegrator;
 import org.apache.commons.math3.analysis.BivariateFunction;
 import org.apache.commons.math3.analysis.UnivariateFunction;
@@ -45,7 +45,7 @@ class TransmissionConvolution extends AbstractParametricFunction {
     }

     @Override
-    public double derivValue(final String parName, final double U, NamedValueSet set) {
+    public double derivValue(final String parName, final double U, Values set) {
         double min = range.min(set);
         double max = range.max(set);

@@ -67,7 +67,7 @@ class TransmissionConvolution extends AbstractParametricFunction {
     }

     @Override
-    public double value(final double U, NamedValueSet set) {
+    public double value(final double U, Values set) {
         double min = range.min(set);
         double max = range.max(set);


@@ -20,16 +20,17 @@ import hep.dataforge.context.Context;
 import hep.dataforge.data.DataNode;
 import hep.dataforge.io.ColumnedDataReader;
 import hep.dataforge.meta.Meta;
-import hep.dataforge.tables.DataPoint;
 import hep.dataforge.tables.PointSource;
 import hep.dataforge.tables.Table;
+import hep.dataforge.values.Values;
+import org.apache.commons.math3.analysis.UnivariateFunction;
+import org.apache.commons.math3.analysis.interpolation.LinearInterpolator;

 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.util.ArrayList;
 import java.util.List;
-import org.apache.commons.math3.analysis.UnivariateFunction;
-import org.apache.commons.math3.analysis.interpolation.LinearInterpolator;

 /**
  *
@@ -61,7 +62,7 @@ public class TransmissionInterpolator implements UnivariateFunction {
     private double xmax;
     private double xmin;

-    private TransmissionInterpolator(Iterable<DataPoint> data, String xName, String yName, int nSmooth, double w, double border) {
+    private TransmissionInterpolator(Iterable<Values> data, String xName, String yName, int nSmooth, double w, double border) {
         prepareXY(data, xName, yName);
         double[] smoothed = smoothXY(x, y, w, border);
         //smoothing iterations
@@ -100,11 +101,11 @@ public class TransmissionInterpolator implements UnivariateFunction {
     * @param xName
     * @param yName
     */
-    private void prepareXY(Iterable<DataPoint> data, String xName, String yName) {
+    private void prepareXY(Iterable<Values> data, String xName, String yName) {

-        List<DataPoint> points = new ArrayList<>();
+        List<Values> points = new ArrayList<>();

-        for (DataPoint dp : data) {
+        for (Values dp : data) {
             points.add(dp);
         }


@@ -15,12 +15,13 @@
  */
 package inr.numass.models;

-import hep.dataforge.stat.parametric.ParametricFunction;
 import hep.dataforge.maths.NamedVector;
-import hep.dataforge.values.NamedValueSet;
-import static java.lang.Math.abs;
+import hep.dataforge.stat.parametric.ParametricFunction;
+import hep.dataforge.values.Values;
 import org.slf4j.LoggerFactory;

+import static java.lang.Math.abs;

 /**
  *
  * @author Darksnake
@@ -39,7 +40,7 @@ public class TritiumSpectrumCaching extends NamedSpectrumCaching {
     }

     @Override
-    protected double transformation(CacheElement cache, NamedValueSet newSet, double x) throws TransformationNotAvailable {
+    protected double transformation(CacheElement cache, Values newSet, double x) throws TransformationNotAvailable {
         double res;
         NamedVector curSet = new NamedVector(newSet);
         double E0new = newSet.getDouble("E0");

@@ -19,8 +19,8 @@ import hep.dataforge.exceptions.NotDefinedException;
 import hep.dataforge.maths.integration.UnivariateIntegrator;
 import hep.dataforge.stat.parametric.AbstractParametricFunction;
 import hep.dataforge.stat.parametric.ParametricFunction;
-import hep.dataforge.values.NamedValueSet;
 import hep.dataforge.values.ValueProvider;
+import hep.dataforge.values.Values;
 import inr.numass.utils.NumassIntegrator;
 import org.apache.commons.math3.analysis.BivariateFunction;
 import org.apache.commons.math3.analysis.UnivariateFunction;
@@ -43,7 +43,7 @@ public class VariableLossSpectrum extends AbstractParametricFunction {
         return new VariableLossSpectrum(new AbstractParametricFunction(new String[0]) {

             @Override
-            public double derivValue(String parName, double x, NamedValueSet set) {
+            public double derivValue(String parName, double x, Values set) {
                 throw new NotDefinedException();
             }

@@ -53,7 +53,7 @@ public class VariableLossSpectrum extends AbstractParametricFunction {
             }

             @Override
-            public double value(double x, NamedValueSet set) {
+            public double value(double x, Values set) {
                 return transmission.value(x);
             }
         }, eMax);
@@ -70,12 +70,12 @@ public class VariableLossSpectrum extends AbstractParametricFunction {
     }

     @Override
-    public double derivValue(String parName, double x, NamedValueSet set) {
+    public double derivValue(String parName, double x, Values set) {
         throw new NotDefinedException();
     }

     @Override
-    public double value(double U, NamedValueSet set) {
+    public double value(double U, Values set) {
         if (U >= eMax) {
             return 0;
         }

@@ -7,7 +7,7 @@ package inr.numass.models.sterile;

 import hep.dataforge.exceptions.NotDefinedException;
 import hep.dataforge.stat.parametric.AbstractParametricBiFunction;
-import hep.dataforge.values.NamedValueSet;
+import hep.dataforge.values.Values;

 import static java.lang.Math.*;

@@ -93,7 +93,7 @@ public class NumassBeta extends AbstractParametricBiFunction {
     * @return
     * @throws NotDefinedException
     */
-    private double derivRootsterile(String name, double E, double E0, NamedValueSet pars) throws NotDefinedException {
+    private double derivRootsterile(String name, double E, double E0, Values pars) throws NotDefinedException {
         double mnu2 = getParameter("mnu2", pars);
         double mst2 = getParameter("msterile2", pars);
         double u2 = getParameter("U2", pars);
@@ -180,7 +180,7 @@ public class NumassBeta extends AbstractParametricBiFunction {
     * @param pars
     * @return
     */
-    private double rootsterile(double E, double E0, NamedValueSet pars) {
+    private double rootsterile(double E, double E0, Values pars) {
         double mnu2 = getParameter("mnu2", pars);
         double mst2 = getParameter("msterile2", pars);
         double u2 = getParameter("U2", pars);
@@ -205,13 +205,13 @@ public class NumassBeta extends AbstractParametricBiFunction {
     }

     @Override
-    public double derivValue(String parName, double fs, double eIn, NamedValueSet pars) {
+    public double derivValue(String parName, double fs, double eIn, Values pars) {
         double E0 = getParameter("E0", pars);
         return derivRootsterile(parName, eIn, E0 - fs, pars);
     }

     @Override
-    public double value(double fs, double eIn, NamedValueSet pars) {
+    public double value(double fs, double eIn, Values pars) {
         double E0 = getParameter("E0", pars);
         return rootsterile(eIn, E0 - fs, pars);
     }
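Judging by the parameter names alone (the full bodies are cut off above), rootsterile presumably implements the standard two-branch mixing of an active and a sterile component,

    S(E) = (1 - U^2)\, S_\beta(E;\, m_\nu^2) + U^2\, S_\beta(E;\, m_{st}^2)

with derivRootsterile supplying the matching parameter derivatives for mnu2, msterile2 and U2.
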
@@ -9,7 +9,7 @@ import hep.dataforge.context.Context;
 import hep.dataforge.maths.MathPlugin;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.stat.parametric.AbstractParametricBiFunction;
-import hep.dataforge.values.NamedValueSet;
+import hep.dataforge.values.Values;
 import inr.numass.models.ResolutionFunction;
 import inr.numass.utils.ExpressionUtils;
 import org.apache.commons.math3.analysis.BivariateFunction;
@@ -59,7 +59,7 @@ public class NumassResolution extends AbstractParametricBiFunction {
     }

     @Override
-    public double derivValue(String parName, double x, double y, NamedValueSet set) {
+    public double derivValue(String parName, double x, double y, Values set) {
         return 0;
     }

@@ -80,7 +80,7 @@ public class NumassResolution extends AbstractParametricBiFunction {
     }

     @Override
-    public double value(double E, double U, NamedValueSet set) {
+    public double value(double E, double U, Values set) {
         assert resA > 0;
         if (resB <= 0) {
             return this.getValueFast(E, U);

@@ -9,7 +9,7 @@ import hep.dataforge.context.Context;
 import hep.dataforge.maths.MathPlugin;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.stat.parametric.AbstractParametricBiFunction;
-import hep.dataforge.values.NamedValueSet;
+import hep.dataforge.values.Values;
 import inr.numass.models.LossCalculator;
 import inr.numass.utils.ExpressionUtils;
 import org.apache.commons.math3.analysis.BivariateFunction;
@@ -50,12 +50,12 @@ public class NumassTransmission extends AbstractParametricBiFunction {
         }
     }

-    public static double getX(double eIn, NamedValueSet set) {
+    public static double getX(double eIn, Values set) {
         //From our article
         return set.getDouble("X") * Math.log(eIn / ION_POTENTIAL) * eIn * ION_POTENTIAL / 1.9580741410115568e6;
     }

-    public static double p0(double eIn, NamedValueSet set) {
+    public static double p0(double eIn, Values set) {
         return LossCalculator.instance().getLossProbability(0, getX(eIn, set));
     }

@@ -64,7 +64,7 @@ public class NumassTransmission extends AbstractParametricBiFunction {
     }

     @Override
-    public double derivValue(String parName, double eIn, double eOut, NamedValueSet set) {
+    public double derivValue(String parName, double eIn, double eOut, Values set) {
         switch (parName) {
             case "trap":
                 return trapFunc.value(eIn, eOut);
@@ -81,7 +81,7 @@ public class NumassTransmission extends AbstractParametricBiFunction {
     }

     @Override
-    public double value(double eIn, double eOut, NamedValueSet set) {
+    public double value(double eIn, double eOut, Values set) {
         //calculate X taking into account its energy dependence
         double X = getX(eIn, set);
         // loss part
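Written out, the energy-dependent effective thickness computed by getX is

    X(E) = X_0 \cdot \frac{E \, I \, \ln(E / I)}{1.9580741410115568 \times 10^{6}}

where X_0 is the fitted "X" parameter and I is ION_POTENTIAL; p0 then maps this through LossCalculator to the zero-loss probability.
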
@@ -15,7 +15,7 @@ import hep.dataforge.meta.Meta;
 import hep.dataforge.stat.parametric.AbstractParametricBiFunction;
 import hep.dataforge.stat.parametric.AbstractParametricFunction;
 import hep.dataforge.stat.parametric.ParametricBiFunction;
-import hep.dataforge.values.NamedValueSet;
+import hep.dataforge.values.Values;
 import inr.numass.models.FSS;
 import inr.numass.utils.NumassIntegrator;
 import org.apache.commons.math3.analysis.UnivariateFunction;
@@ -89,7 +89,7 @@ public class SterileNeutrinoSpectrum extends AbstractParametricFunction {
     }

     @Override
-    public double derivValue(String parName, double u, NamedValueSet set) {
+    public double derivValue(String parName, double u, Values set) {
         switch (parName) {
             case "U2":
             case "msterile2":
@@ -105,7 +105,7 @@ public class SterileNeutrinoSpectrum extends AbstractParametricFunction {
     }

     @Override
-    public double value(double u, NamedValueSet set) {
+    public double value(double u, Values set) {
         return integrate(u, source, transRes, set);
     }

@@ -128,7 +128,7 @@ public class SterileNeutrinoSpectrum extends AbstractParametricFunction {
             double u,
             ParametricBiFunction sourceFunction,
             ParametricBiFunction transResFunction,
-            NamedValueSet set) {
+            Values set) {

         double eMax = set.getDouble("E0") + 5d;

@@ -178,7 +178,7 @@ public class SterileNeutrinoSpectrum extends AbstractParametricFunction {
     }

     @Override
-    public double derivValue(String parName, double eIn, double u, NamedValueSet set) {
+    public double derivValue(String parName, double eIn, double u, Values set) {
         switch (parName) {
             case "X":
                 //TODO implement p0 derivative
@@ -191,13 +191,13 @@ public class SterileNeutrinoSpectrum extends AbstractParametricFunction {
     }

     @Override
-    public double value(double eIn, double u, NamedValueSet set) {
+    public double value(double eIn, double u, Values set) {

         double p0 = NumassTransmission.p0(eIn, set);
         return p0 * resolution.value(eIn, u, set) + lossRes(transmission, eIn, u, set);
     }

-    private double lossRes(ParametricBiFunction transFunc, double eIn, double u, NamedValueSet set) {
+    private double lossRes(ParametricBiFunction transFunc, double eIn, double u, Values set) {
         UnivariateFunction integrand = (eOut) -> transFunc.value(eIn, eOut, set) * resolution.value(eOut, u, set);

         double border = u + 30;

@ -23,10 +23,10 @@ import hep.dataforge.data.DataUtils;
 import hep.dataforge.io.ColumnedDataWriter;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.meta.MetaBuilder;
-import hep.dataforge.tables.DataPoint;
 import hep.dataforge.tables.ListTable;
-import hep.dataforge.tables.MapPoint;
 import hep.dataforge.tables.Table;
+import hep.dataforge.tables.ValueMap;
+import hep.dataforge.values.Values;
 import hep.dataforge.workspace.AbstractTask;
 import hep.dataforge.workspace.TaskModel;
 import org.slf4j.LoggerFactory;
@ -84,8 +84,8 @@ public class NumassSubstractEmptySourceTask extends AbstractTask<Table> {
     private Table subtract(Table merge, Table empty) {
         ListTable.Builder builder = new ListTable.Builder(merge.getFormat());
         merge.stream().forEach(point -> {
-            MapPoint.Builder pointBuilder = new MapPoint.Builder(point);
-            Optional<DataPoint> referencePoint = empty.stream()
+            ValueMap.Builder pointBuilder = new ValueMap.Builder(point);
+            Optional<Values> referencePoint = empty.stream()
                     .filter(p -> Math.abs(p.getDouble("Uset") - point.getDouble("Uset")) < 0.1).findFirst();
             if (referencePoint.isPresent()) {
                 pointBuilder.putValue("CR", Math.max(0, point.getDouble("CR") - referencePoint.get().getDouble("CR")));
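This hunk shows the copy-and-override idiom for the new row type: wrap an existing `Values` row in a `ValueMap.Builder`, overwrite one column, and emit the result. A minimal sketch under the assumptions visible here; the copying `ValueMap.Builder(Values)` constructor and `putValue` appear in the diff, the terminal `build()` call is an assumption:

import hep.dataforge.tables.ValueMap;
import hep.dataforge.values.Values;

class PointMath {
    // Sketch: subtract a reference count rate from a measured point,
    // clamping at zero exactly as the task above does.
    static Values subtractCountRate(Values point, Values reference) {
        ValueMap.Builder builder = new ValueMap.Builder(point); // copies all columns
        builder.putValue("CR",
                Math.max(0, point.getDouble("CR") - reference.getDouble("CR")));
        return builder.build(); // assumed accessor, not shown in the diff
    }
}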
@ -7,10 +7,10 @@ import hep.dataforge.data.DataNode;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.meta.Laminate;
 import hep.dataforge.meta.MetaBuilder;
-import hep.dataforge.tables.DataPoint;
 import hep.dataforge.tables.Table;
 import hep.dataforge.tables.TableTransform;
 import hep.dataforge.values.Value;
+import hep.dataforge.values.Values;
 import hep.dataforge.workspace.SingleActionTask;
 import hep.dataforge.workspace.TaskModel;
 import inr.numass.utils.ExpressionUtils;
@ -60,7 +60,7 @@ public class NumassTableFilterTask extends SingleActionTask<Table, Table> {
             getLogger(inputMeta).debug("Filtering finished");
             return TableTransform.filter(input, "Uset", uLo, uHi);
         } else if (inputMeta.hasValue("filter.condition")) {
-            Predicate<DataPoint> predicate = (dp) -> ExpressionUtils.condition(inputMeta.getString("filter.condition"), unbox(dp));
+            Predicate<Values> predicate = (dp) -> ExpressionUtils.condition(inputMeta.getString("filter.condition"), unbox(dp));
             return TableTransform.filter(input, predicate);
         } else {
             throw new RuntimeException("No filtering condition specified");
@ -68,7 +68,7 @@ public class NumassTableFilterTask extends SingleActionTask<Table, Table> {
         }
     }

-    private Map<String, Object> unbox(DataPoint dp) {
+    private Map<String, Object> unbox(Values dp) {
         Map<String, Object> res = new HashMap<>();
         for (String field : dp.names()) {
             Value val = dp.getValue(field);
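In the expression-based branch, `unbox` flattens a row into a plain `Map` so `ExpressionUtils` can evaluate the condition against it. The simpler column-predicate path can also be exercised directly; a sketch using only calls shown above (`getDouble` and the `TableTransform.filter(Table, Predicate<Values>)` overload), with "Uset" as the column this task already filters on:

import hep.dataforge.tables.Table;
import hep.dataforge.tables.TableTransform;
import hep.dataforge.values.Values;

import java.util.function.Predicate;

class FilterSketch {
    // Sketch: keep only rows whose retarding voltage exceeds a threshold.
    static Table keepAbove(Table input, double threshold) {
        Predicate<Values> predicate = dp -> dp.getDouble("Uset") > threshold;
        return TableTransform.filter(input, predicate);
    }
}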
@ -15,10 +15,10 @@
  */
 package inr.numass.utils;

-import hep.dataforge.tables.DataPoint;
 import hep.dataforge.tables.ListTable;
-import hep.dataforge.tables.MapPoint;
 import hep.dataforge.tables.Table;
+import hep.dataforge.tables.ValueMap;
+import hep.dataforge.values.Values;
 import inr.numass.data.SpectrumDataAdapter;

 import java.util.Scanner;
@ -37,7 +37,7 @@ public class DataModelUtils {
         for (int i = 0; i < numpoints; i++) {
             // the formula works even when the points are in reverse order
             double x = from + (to - from) / (numpoints - 1) * i;
-            DataPoint point = new MapPoint(list, x, time);
+            Values point = new ValueMap(list, x, time);
             res.row(point);
         }

@ -51,7 +51,7 @@ public class DataModelUtils {
         while (scan.hasNextLine()) {
             double x = scan.nextDouble();
             int time = scan.nextInt();
-            res.row(new MapPoint(list, x, time));
+            res.row(new ValueMap(list, x, time));
         }
         return res.build();
     }
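The grid formula above, x = from + (to - from)/(numpoints - 1) * i, hits both endpoints exactly and, as the (translated) comment notes, also works when from > to. A self-contained sketch of the row-building loop; the column names and the `ListTable.Builder(String...)` overload are assumptions, since the diff only shows the `TableFormat`-based constructor:

import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.Table;
import hep.dataforge.tables.ValueMap;
import hep.dataforge.values.Values;

class GridSketch {
    // Sketch: numpoints rows spaced evenly between from and to (either order).
    static Table uniformGrid(double from, double to, int numpoints, int time) {
        String[] list = {"x", "time"};                       // hypothetical column names
        ListTable.Builder res = new ListTable.Builder(list); // assumed overload
        for (int i = 0; i < numpoints; i++) {
            double x = from + (to - from) / (numpoints - 1) * i;
            Values point = new ValueMap(list, x, time);
            res.row(point);
        }
        return res.build();
    }
}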
@ -16,10 +16,10 @@
 package inr.numass.utils;

 import hep.dataforge.context.Global;
-import hep.dataforge.tables.DataPoint;
 import hep.dataforge.tables.ListTable;
-import hep.dataforge.tables.MapPoint;
 import hep.dataforge.tables.Table;
+import hep.dataforge.tables.ValueMap;
+import hep.dataforge.values.Values;
 import inr.numass.data.SpectrumDataAdapter;

 import java.io.File;
@ -51,7 +51,7 @@ public class OldDataReader {
         if (lineScan.hasNextDouble()) {
             ushift = lineScan.nextDouble();
         }
-        DataPoint point = new MapPoint(list, u, time, ushift);
+        Values point = new ValueMap(list, u, time, ushift);
         res.row(point);
     }
     return res.build();
@ -100,7 +100,7 @@ public class OldDataReader {
         dummy = sc.nextDouble();
         dummy = sc.nextDouble();
         dummy = sc.nextDouble();
-        DataPoint point = factory.buildSpectrumDataPoint(x, count, time);
+        Values point = factory.buildSpectrumDataPoint(x, count, time);
         if (x >= Elow) {
             res.row(point);
         }
@ -134,7 +134,7 @@ public class OldDataReader {
         count = sc.nextLong();
         dummy = sc.nextDouble();
         dummy = sc.nextDouble();
-        DataPoint point = factory.buildSpectrumDataPoint(x, count, time);
+        Values point = factory.buildSpectrumDataPoint(x, count, time);
         if (x > Elow) {
             res.row(point);
         }
@ -176,7 +176,7 @@ public class OldDataReader {
         count = lsc.nextDouble();
         cr = lsc.nextDouble();
         crErr = lsc.nextDouble();
-        DataPoint point = factory.buildSpectrumDataPoint(x, (long) (cr * time), crErr * time, time);
+        Values point = factory.buildSpectrumDataPoint(x, (long) (cr * time), crErr * time, time);
         // SpectrumDataPoint point = new SpectrumDataPoint(x, (long) count, time);

         res.row(point);
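Two `SpectrumDataAdapter` overloads appear in these hunks: `buildSpectrumDataPoint(x, count, time)` and a four-argument form that also carries a count error. The last hunk stores count rates, so it converts back to counts before building the row; the same conversion as a sketch, with the parameter types assumed from the surrounding code:

import hep.dataforge.values.Values;
import inr.numass.data.SpectrumDataAdapter;

class RateSketch {
    // Sketch: convert a count rate and its error back to counts, as the
    // last hunk does, then build a spectrum row via the adapter.
    static Values rateToPoint(SpectrumDataAdapter factory,
                              double x, double cr, double crErr, double time) {
        long count = (long) (cr * time); // rate times live time gives counts
        double countErr = crErr * time;  // propagate the error the same way
        return factory.buildSpectrumDataPoint(x, count, countErr, time);
    }
}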
@ -5,10 +5,10 @@
  */
 package inr.numass.utils;

-import hep.dataforge.tables.DataPoint;
 import hep.dataforge.tables.ListTable;
-import hep.dataforge.tables.MapPoint;
 import hep.dataforge.tables.Table;
+import hep.dataforge.tables.ValueMap;
+import hep.dataforge.values.Values;
 import inr.numass.data.NumassPoint;
 import org.apache.commons.math3.analysis.ParametricUnivariateFunction;
 import org.apache.commons.math3.exception.DimensionMismatchException;
@ -62,13 +62,13 @@ public class UnderflowCorrection {
 //        return builder.build();
 //    }

-    public DataPoint fitPoint(NumassPoint point, int xLow, int xHigh, int upper, int binning) {
+    public Values fitPoint(NumassPoint point, int xLow, int xHigh, int upper, int binning) {
         double norm = ((double) point.getCountInWindow(xLow, upper)) / point.getLength();
         double[] fitRes = getUnderflowExpParameters(point, xLow, xHigh, binning);
         double a = fitRes[0];
         double sigma = fitRes[1];

-        return new MapPoint(pointNames, point.getVoltage(), a, sigma, a * sigma * Math.exp(xLow / sigma) / norm + 1d);
+        return new ValueMap(pointNames, point.getVoltage(), a, sigma, a * sigma * Math.exp(xLow / sigma) / norm + 1d);
     }

     public Table fitAllPoints(Iterable<NumassPoint> data, int xLow, int xHigh, int upper, int binning) {
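`fitPoint` uses the positional `ValueMap` constructor, where the i-th value binds to the i-th entry of the name array; other hunks in this commit use the builder, which accumulates named columns one at a time. A sketch of both styles with hypothetical names and values; the no-arg `ValueMap.Builder()` appears later in this commit, while `build()` is an assumption:

import hep.dataforge.tables.ValueMap;
import hep.dataforge.values.Values;

class ConstructionSketch {
    static void demo() {
        String[] names = {"U", "a", "sigma"}; // hypothetical columns

        // Positional: values are zipped with the name array in order.
        Values positional = new ValueMap(names, 18500d, 1.2e-3, 4.5);

        // Builder: columns are set one by one by name.
        ValueMap.Builder builder = new ValueMap.Builder();
        builder.putValue("U", 18500d);
        builder.putValue("a", 1.2e-3);
        builder.putValue("sigma", 4.5);
        Values built = builder.build(); // assumed accessor
    }
}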
@ -13,7 +13,7 @@ import hep.dataforge.meta.MetaBuilder
 import hep.dataforge.storage.commons.JSONMetaWriter
 import hep.dataforge.storage.commons.LoaderFactory
 import hep.dataforge.storage.commons.StorageManager
-import hep.dataforge.tables.MapPoint
+import hep.dataforge.tables.ValueMap
 import inr.numass.client.NumassClient

 new StorageManager().startGlobal();
@ -38,7 +38,7 @@ new NumassClient("127.0.0.1",8335).withCloseable{

     for(int i = 0; i<5; i++){
-        data.putNode(DataPoint.toMeta(new MapPoint(names,i, 2*i,3*i)));
+        data.putNode(DataPoint.toMeta(new ValueMap(names,i, 2*i,3*i)));
     }

@ -33,9 +33,9 @@ import hep.dataforge.plots.jfreechart.JFreeChartFrame;
 import hep.dataforge.storage.api.PointLoader;
 import hep.dataforge.storage.api.Storage;
 import hep.dataforge.storage.commons.StorageUtils;
-import hep.dataforge.tables.DataPoint;
-import hep.dataforge.tables.MapPoint;
+import hep.dataforge.tables.ValueMap;
 import hep.dataforge.values.Value;
+import hep.dataforge.values.Values;
 import javafx.application.Platform;
 import javafx.fxml.FXML;
 import javafx.fxml.FXMLLoader;
@ -133,7 +133,7 @@ public class MspViewController implements Encapsulated {

         Collection<String> names = joinNames(loaders);

-        Stream<DataPoint> stream = loaders.stream().flatMap(loader -> getLoaderData(loader));
+        Stream<Values> stream = loaders.stream().flatMap(loader -> getLoaderData(loader));

         updateMspPane(PlotDataUtils.buildGroup("timestamp", names, stream));
@ -154,15 +154,15 @@ public class MspViewController implements Encapsulated {
         return nameSet;
     }

-    private Stream<DataPoint> getLoaderData(PointLoader loader) {
+    private Stream<Values> getLoaderData(PointLoader loader) {
         try {
             loader.open();
-            List<DataPoint> points = new ArrayList<>();
+            List<Values> points = new ArrayList<>();
             // callback.updateStatus("Loading mass spectrometer data from " + loader.getName());

-            DataPoint last = null;
+            Values last = null;

-            for (DataPoint dp : loader) {
+            for (Values dp : loader) {
                 points.add(dp);
                 last = dp;
             }
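`getLoaderData` leans on `PointLoader` being iterable over `Values` rows once `open()` has been called. The same pattern reduced to its core; the exception signature is an assumption, since the hunk cuts off before the catch block:

import hep.dataforge.storage.api.PointLoader;
import hep.dataforge.values.Values;

import java.util.ArrayList;
import java.util.List;
import java.util.stream.Stream;

class LoaderSketch {
    // Sketch: drain a loader into memory and expose its rows as a stream.
    static Stream<Values> loaderRows(PointLoader loader) throws Exception {
        loader.open();
        List<Values> points = new ArrayList<>();
        for (Values dp : loader) { // PointLoader iterates over stored rows
            points.add(dp);
        }
        return points.stream();
    }
}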
@ -241,8 +241,8 @@ public class MspViewController implements Encapsulated {
      * @param last
      * @return
      */
-    private DataPoint terminatorPoint(DataPoint last) {
-        MapPoint.Builder p = new MapPoint.Builder();
+    private Values terminatorPoint(Values last) {
+        ValueMap.Builder p = new ValueMap.Builder();
         p.putValue("timestamp", last.getValue("timestamp").timeValue().plusMillis(10));
         for (String name : last.namesAsArray()) {
             if (!name.equals("timestamp")) {
@ -13,10 +13,10 @@ import hep.dataforge.plots.data.TimePlottable
 import hep.dataforge.plots.fx.PlotContainer
 import hep.dataforge.plots.jfreechart.JFreeChartFrame
 import hep.dataforge.storage.commons.JSONMetaWriter
-import hep.dataforge.tables.DataPoint
 import hep.dataforge.tables.ListTable
-import hep.dataforge.tables.MapPoint
+import hep.dataforge.tables.ValueMap
 import hep.dataforge.tables.XYAdapter
+import hep.dataforge.values.Values
 import inr.numass.data.NumassData
 import inr.numass.data.NumassDataUtils
 import inr.numass.data.NumassPoint
@ -254,7 +254,7 @@ class NumassLoaderView : View() {

         spectrumData.fillData(data.nmPoints.stream()
                 .map { point: NumassPoint -> getSpectrumPoint(point, lowChannel, highChannel, dTime) }
-                .collect(Collectors.toList<DataPoint>())
+                .collect(Collectors.toList<Values>())
         )
     }

@ -268,9 +268,9 @@ class NumassLoaderView : View() {

     }

-    private fun getSpectrumPoint(point: NumassPoint, lowChannel: Int, upChannel: Int, dTime: Double): DataPoint {
+    private fun getSpectrumPoint(point: NumassPoint, lowChannel: Int, upChannel: Int, dTime: Double): Values {
         val u = point.voltage
-        return MapPoint(arrayOf(XYAdapter.X_VALUE_KEY, XYAdapter.Y_VALUE_KEY, XYAdapter.Y_ERROR_KEY), u,
+        return ValueMap(arrayOf(XYAdapter.X_VALUE_KEY, XYAdapter.Y_VALUE_KEY, XYAdapter.Y_ERROR_KEY), u,
                 NumassDataUtils.countRateWithDeadTime(point, lowChannel, upChannel, dTime),
                 NumassDataUtils.countRateWithDeadTimeErr(point, lowChannel, upChannel, dTime))
     }
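`getSpectrumPoint` keys its row with the `XYAdapter` constants so a plot frame can read it as (x, y, y-error). The same construction rendered in Java, using only names that appear in this commit:

import hep.dataforge.tables.ValueMap;
import hep.dataforge.tables.XYAdapter;
import hep.dataforge.values.Values;

class PlotPointSketch {
    // Sketch: a spectrum point an XYAdapter-based plot can consume directly.
    static Values spectrumPoint(double u, double countRate, double countRateErr) {
        String[] keys = {XYAdapter.X_VALUE_KEY, XYAdapter.Y_VALUE_KEY,
                XYAdapter.Y_ERROR_KEY};
        return new ValueMap(keys, u, countRate, countRateErr);
    }
}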
@ -9,10 +9,10 @@ import hep.dataforge.plots.fx.PlotContainer
 import hep.dataforge.plots.jfreechart.JFreeChartFrame
 import hep.dataforge.storage.api.PointLoader
 import hep.dataforge.storage.api.ValueIndex
-import hep.dataforge.tables.DataPoint
 import hep.dataforge.tables.ListTable
 import hep.dataforge.tables.Table
 import hep.dataforge.tables.XYAdapter
+import hep.dataforge.values.Values
 import tornadofx.*

 /**
@ -49,7 +49,7 @@ class SlowControlView : View("My View") {
     }

     private fun getData(loader: PointLoader, query: Meta = Meta.empty()): Table {
-        val index: ValueIndex<DataPoint> =
+        val index: ValueIndex<Values> =
                 if (query.hasValue("index")) {
                     //use custom index if needed
                     loader.getIndex(query.getString("index"))
@ -3,7 +3,7 @@ package inr.numass.viewer.test
 import hep.dataforge.plots.data.PlottableData
 import hep.dataforge.plots.fx.PlotContainer
 import hep.dataforge.plots.jfreechart.JFreeChartFrame
-import hep.dataforge.tables.MapPoint
+import hep.dataforge.tables.ValueMap
 import hep.dataforge.tables.XYAdapter
 import tornadofx.*
 import java.util.*
@ -22,7 +22,7 @@ class JFCTest : View("My View") {
         action {

             data.fillData(
-                    (1..1000).map { MapPoint(arrayOf(XYAdapter.X_VALUE_KEY, XYAdapter.Y_VALUE_KEY), it, rnd.nextDouble()) }
+                    (1..1000).map { ValueMap(arrayOf(XYAdapter.X_VALUE_KEY, XYAdapter.Y_VALUE_KEY), it, rnd.nextDouble()) }
             )
             plot.add(data)
         }