numass update
This commit is contained in:
parent 7cdd4286aa
commit d7689a7444
@@ -19,9 +19,10 @@ import static inr.numass.data.api.NumassPoint.HV_KEY;
*/
public abstract class AbstractAnalyzer implements NumassAnalyzer {
public static String WINDOW_KEY = "window";
public static String TIME_KEY = "timestamp";

public static String[] NAME_LIST = {LENGTH_KEY, COUNT_KEY, COUNT_RATE_KEY, COUNT_RATE_ERROR_KEY, WINDOW_KEY, "timestamp"};
public static String[] NAME_LIST_WITH_HV = {HV_KEY, LENGTH_KEY, COUNT_KEY, COUNT_RATE_KEY, COUNT_RATE_ERROR_KEY, WINDOW_KEY, "timestamp"};
public static String[] NAME_LIST = {LENGTH_KEY, COUNT_KEY, COUNT_RATE_KEY, COUNT_RATE_ERROR_KEY, WINDOW_KEY, TIME_KEY};
public static String[] NAME_LIST_WITH_HV = {HV_KEY, LENGTH_KEY, COUNT_KEY, COUNT_RATE_KEY, COUNT_RATE_ERROR_KEY, WINDOW_KEY, TIME_KEY};
@Nullable
private final SignalProcessor processor;

@@ -64,10 +65,13 @@ public abstract class AbstractAnalyzer implements NumassAnalyzer {
}
}


@Override
public Table analyze(NumassSet set, Meta config) {
TableFormat format = new TableFormatBuilder()
/**
* Get table format for summary table
* @param config
* @return
*/
protected TableFormat getTableFormat(Meta config){
return new TableFormatBuilder()
.addNumber(HV_KEY, X_VALUE_KEY)
.addNumber(LENGTH_KEY)
.addNumber(COUNT_KEY)
@@ -76,6 +80,12 @@ public abstract class AbstractAnalyzer implements NumassAnalyzer {
.addColumn(WINDOW_KEY)
.addTime()
.build();
}


@Override
public Table analyze(NumassSet set, Meta config) {
TableFormat format = getTableFormat(config);

return new ListTable.Builder(format)
.rows(set.getPoints().map(point -> analyze(point, config)))
@@ -27,13 +27,14 @@ public class SimpleAnalyzer extends AbstractAnalyzer {
int loChannel = config.getInt("window.lo", 0);
int upChannel = config.getInt("window.up", Integer.MAX_VALUE);
long count = getEvents(block, config).count();
double countRate = (double) count / block.getLength().toMillis() * 1000;
double countRateError = Math.sqrt((double) count) / block.getLength().toMillis() * 1000;
double length = (double) block.getLength().toNanos()/1e9;
double countRate = (double) count / length;
double countRateError = Math.sqrt((double) count) / length;

if (block instanceof NumassPoint) {
return ValueMap.of(NAME_LIST_WITH_HV,
((NumassPoint) block).getVoltage(),
block.getLength().toNanos(),
length,
count,
countRate,
countRateError,
@@ -41,7 +42,7 @@ public class SimpleAnalyzer extends AbstractAnalyzer {
block.getStartTime());
} else {
return ValueMap.of(NAME_LIST,
block.getLength().toNanos(),
length,
count,
countRate,
countRateError,
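Note on the SimpleAnalyzer change above: the block length is now taken in seconds (toNanos()/1e9), so the reported values are the plain Poisson estimates

countRate = N / T and countRateError = sqrt(N) / T,

with N the number of events passing the window and T the block length in seconds.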
@@ -1,11 +1,18 @@
package inr.numass.data.analyzers;

import hep.dataforge.description.ValueDef;
import hep.dataforge.meta.Meta;
import hep.dataforge.tables.TableFormat;
import hep.dataforge.tables.ValueMap;
import hep.dataforge.values.Value;
import hep.dataforge.values.ValueType;
import hep.dataforge.values.Values;
import inr.numass.data.api.NumassBlock;
import inr.numass.data.api.SignalProcessor;
import org.jetbrains.annotations.Nullable;

import java.util.Map;

/**
* An analyzer dispatcher which uses different analyzer for different meta
* Created by darksnake on 11.07.2017.
@@ -27,7 +34,6 @@ public class SmartAnalyzer extends AbstractAnalyzer {

@Override
public Values analyze(NumassBlock block, Meta config) {
//TODO add result caching
if (config.hasValue("type")) {
switch (config.getString("type")) {
case "simple":
@@ -40,11 +46,46 @@ public class SmartAnalyzer extends AbstractAnalyzer {
throw new IllegalArgumentException("Analyzer not found");
}
} else {
if(config.hasValue("t0")){
return timeAnalyzer.analyze(block,config);
int t0 = getT0(block, config);
if (t0 == 0) {
Map<String, Value> map = simpleAnalyzer.analyze(block, config).asMap();
map.putIfAbsent(TimeAnalyzer.T0_KEY, Value.of(0d));
return new ValueMap(map);
} else {
return simpleAnalyzer.analyze(block,config);
return timeAnalyzer.analyze(block, config.getBuilder().putValue(TimeAnalyzer.T0_KEY, t0));
}
}
}

private double estimateCountRate(NumassBlock block) {
return (double) block.getEvents().count() / block.getLength().toMillis() * 1000;
}

@ValueDef(name = "t0", type = ValueType.NUMBER, info = "Constant t0 cut")
@ValueDef(name = "t0.crFraction", type = ValueType.NUMBER, info = "The relative fraction of events that should be removed by time cut")
@ValueDef(name = "t0.min", type = ValueType.NUMBER, def = "0", info = "Minimal t0")
private int getT0(NumassBlock block, Meta meta) {
if (meta.hasValue("t0")) {
return meta.getInt("t0");
} else if (meta.hasMeta("t0")) {
double fraction = meta.getDouble("t0.crFraction");
double cr = estimateCountRate(block);
if (cr < meta.getDouble("t0.minCR", 0)) {
return 0;
} else {
return (int) Math.max(-1e9 / cr * Math.log(1d - fraction), meta.getDouble("t0.min", 0));
}
} else {
return 0;
}

}

@Override
protected TableFormat getTableFormat(Meta config) {
if (config.hasValue(TimeAnalyzer.T0_KEY) || config.hasMeta(TimeAnalyzer.T0_KEY)) {
return timeAnalyzer.getTableFormat(config);
}
return super.getTableFormat(config);
}
}
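Note on getT0 above: when only t0.crFraction is given, the estimate appears to assume exponentially distributed inter-event delays, so a cut at t0 removes the fraction 1 - exp(-cr*t0) of events; solving for t0 and converting the rate (counts per second) to nanoseconds yields the -1e9 / cr * log(1 - fraction) expression, floored at t0.min. A minimal standalone sketch of the same estimate (method and parameter names here are illustrative, not taken from the repository):

// Sketch: pick a t0 cut (in nanoseconds) that removes a given fraction of events,
// assuming Poisson statistics, i.e. exponential inter-event delays with rate cr.
static int estimateT0(double cr /* counts per second */, double fraction, double minT0) {
    // P(delay < t0) = 1 - exp(-cr * t0[s])  =>  t0[s] = -log(1 - fraction) / cr
    double t0Nanos = -1e9 / cr * Math.log(1d - fraction);
    return (int) Math.max(t0Nanos, minT0);
}

For example, removing 10% of the events at cr = 1e5 counts per second corresponds to t0 = -ln(0.9) / 1e5 s, roughly 1.05e3 ns.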
@@ -1,6 +1,8 @@
package inr.numass.data.analyzers;

import hep.dataforge.meta.Meta;
import hep.dataforge.tables.TableFormat;
import hep.dataforge.tables.TableFormatBuilder;
import hep.dataforge.tables.ValueMap;
import hep.dataforge.values.Values;
import inr.numass.data.api.NumassBlock;
@@ -14,11 +16,18 @@ import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Stream;

import static hep.dataforge.tables.XYAdapter.*;
import static inr.numass.data.api.NumassPoint.HV_KEY;

/**
* An analyzer which uses time information from events
* Created by darksnake on 11.07.2017.
*/
public class TimeAnalyzer extends AbstractAnalyzer {
public static String T0_KEY = "t0";

public static String[] NAME_LIST = {LENGTH_KEY, COUNT_KEY, COUNT_RATE_KEY, COUNT_RATE_ERROR_KEY, WINDOW_KEY, TIME_KEY, T0_KEY};
public static String[] NAME_LIST_WITH_HV = {HV_KEY, LENGTH_KEY, COUNT_KEY, COUNT_RATE_KEY, COUNT_RATE_ERROR_KEY, WINDOW_KEY, TIME_KEY, T0_KEY};

public TimeAnalyzer(@Nullable SignalProcessor processor) {
super(processor);
@@ -37,6 +46,7 @@ public class TimeAnalyzer extends AbstractAnalyzer {
AtomicLong totalT = new AtomicLong(0);

getEventsWithDelay(block, config)
.filter(pair -> pair.getValue() >= t0)
.forEach(pair -> {
totalN.incrementAndGet();
//TODO add progress listener here
@@ -45,8 +55,9 @@ public class TimeAnalyzer extends AbstractAnalyzer {

double countRate = 1e6 * totalN.get() / (totalT.get() / 1000 - t0 * totalN.get() / 1000);//1e9 / (totalT.get() / totalN.get() - t0);
double countRateError = countRate / Math.sqrt(totalN.get());
long count = (long) (totalT.get() * (countRate / 1e9));
double length = totalT.get();
double length = totalT.get() / 1e9;
long count = (long) (length * countRate);


if (block instanceof NumassPoint) {
return ValueMap.of(NAME_LIST_WITH_HV,
@@ -56,7 +67,9 @@ public class TimeAnalyzer extends AbstractAnalyzer {
countRate,
countRateError,
new Integer[]{loChannel, upChannel},
block.getStartTime());
block.getStartTime(),
(double)t0 / 1000d
);
} else {
return ValueMap.of(NAME_LIST,
length,
@@ -64,7 +77,8 @@ public class TimeAnalyzer extends AbstractAnalyzer {
countRate,
countRateError,
new Integer[]{loChannel, upChannel},
block.getStartTime()
block.getStartTime(),
(double)t0 / 1000d
);
}
}
@@ -81,37 +95,46 @@ public class TimeAnalyzer extends AbstractAnalyzer {
* @return
*/
public Stream<Pair<NumassEvent, Long>> getEventsWithDelay(NumassBlock block, Meta config) {
long t0 = getT0(block, config);

AtomicReference<NumassEvent> lastEvent = new AtomicReference<>(null);

Stream<NumassEvent> eventStream = super.getEvents(block, config);//using super implementation

return eventStream.map(event -> {
if (lastEvent.get() == null) {
lastEvent.set(event);
return new Pair<>(event, 0L);
} else {
long res = event.getTimeOffset() - lastEvent.get().getTimeOffset();
if (res >= 0) {
long res = lastEvent.get() == null ? -1L : event.getTimeOffset() - lastEvent.get().getTimeOffset();

if (res < 0) {
res = 0L;
}

lastEvent.set(event);
return new Pair<>(event, res);
} else {
lastEvent.set(null);
return new Pair<>(event, 0L);
}
}
}).filter(pair -> pair.getValue() >= t0);
});
}

/**
* The filtered stream of events
*
* @param block
* @param config
* @return
*/
@Override
public Stream<NumassEvent> getEvents(NumassBlock block, Meta config) {
return getEventsWithDelay(block, config).map(Pair::getKey);
long t0 = getT0(block, config);
return getEventsWithDelay(block, config).filter(pair -> pair.getValue() >= t0).map(Pair::getKey);
}

@Override
protected TableFormat getTableFormat(Meta config) {
return new TableFormatBuilder()
.addNumber(HV_KEY, X_VALUE_KEY)
.addNumber(LENGTH_KEY)
.addNumber(COUNT_KEY)
.addNumber(COUNT_RATE_KEY, Y_VALUE_KEY)
.addNumber(COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
.addColumn(WINDOW_KEY)
.addTime()
.addNumber(T0_KEY)
.build();
}
}
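Note on the rate estimate in TimeAnalyzer.analyze above: for exponential inter-event delays, a delay that survives the cut (dt >= t0) has mean t0 + 1/rate, so the rate can be recovered from the accepted events alone as

countRate = totalN / (totalT - totalN * t0),

where totalT is the sum of the accepted delays and both totalT and t0 are in nanoseconds; the 1e6 and /1000 factors in the code only convert this to counts per second (1e9 ns per s), and countRateError = countRate / sqrt(totalN) is the usual relative Poisson error of 1/sqrt(N).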
@@ -25,20 +25,20 @@ ctx.pluginManager().load(NumassPlugin.class)

new GrindShell(ctx).eval {
PlotHelper plot = plots
File rootDir = new File("D:\\Work\\Numass\\data\\2017_05\\Fill_3")
File rootDir = new File("D:\\Work\\Numass\\data\\2017_05\\Fill_2")

NumassStorage storage = NumassStorageFactory.buildLocal(rootDir);


def set = "set_43"
def hv = 16000;
def set = "set_2"
def hv = 18300;
def loader = storage.provide("loader::$set", NumassSet.class).get();
def point = loader.provide("$hv", NumassPoint.class).get()

def loChannel = 500;
def upChannel = 2000;
def loChannel = 450;
def upChannel = 3100;

def histogram = PointAnalyzer.histogram(point, loChannel, upChannel, 0.7, 1000).asTable();
def histogram = PointAnalyzer.histogram(point, loChannel, upChannel, 1, 500).asTable();

println "finished histogram calculation..."
@@ -99,16 +99,16 @@ public class MergeDataAction extends ManyToOneAction<Table, Table> {
return dp1;
}

double Uset = dp1.getValue(parnames[0]).doubleValue();
double voltage = dp1.getValue(NumassPoint.HV_KEY).doubleValue();
// average the measured voltage
double Uread = (dp1.getValue(parnames[1]).doubleValue() + dp2.getValue(parnames[1]).doubleValue()) / 2;
// double Uread = (dp1.getValue(parnames[1]).doubleValue() + dp2.getValue(parnames[1]).doubleValue()) / 2;

double t1 = dp1.getValue(NumassPoint.LENGTH_KEY).doubleValue();
double t2 = dp2.getValue(NumassPoint.LENGTH_KEY).doubleValue();
double time = t1 + t2;

long total = dp1.getValue(parnames[3]).intValue() + dp2.getValue(parnames[3]).intValue();
long wind = dp1.getValue(parnames[4]).intValue() + dp2.getValue(parnames[4]).intValue();
long total = dp1.getValue(NumassAnalyzer.COUNT_KEY).intValue() + dp2.getValue(NumassAnalyzer.COUNT_KEY).intValue();
// long wind = dp1.getValue(parnames[4]).intValue() + dp2.getValue(parnames[4]).intValue();
// double corr = dp1.getValue(parnames[5]).doubleValue() + dp2.getValue(parnames[5]).doubleValue();

double cr1 = dp1.getValue(NumassAnalyzer.COUNT_RATE_KEY).doubleValue();
@@ -122,7 +122,7 @@ public class MergeDataAction extends ManyToOneAction<Table, Table> {
// absolute errors add in quadrature
double crErr = Math.sqrt(err1 * err1 * t1 * t1 + err2 * err2 * t2 * t2) / time;

ValueMap.Builder map = ValueMap.of(parnames, Uset, time, total, cr, crErr).builder();
ValueMap.Builder map = ValueMap.of(parnames, voltage, time, total, cr, crErr).builder();

// if (dp1.getNames().contains("relCR") && dp2.getNames().contains("relCR")) {
// double relCR = (dp1.getDouble("relCR") + dp2.getDouble("relCR")) / 2;
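Note on the merged error above: crErr is what one gets by propagating independent absolute errors through a live-time-weighted average of the two point rates,

cr = (cr1*t1 + cr2*t2) / (t1 + t2), crErr = sqrt((err1*t1)^2 + (err2*t2)^2) / (t1 + t2),

(the cr computation itself lies outside this hunk); for purely Poissonian point errors err_i = sqrt(N_i)/t_i this reduces to sqrt(N1 + N2) / (t1 + t2).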
@@ -141,7 +141,7 @@ public class MergeDataAction extends ManyToOneAction<Table, Table> {
throw new IllegalArgumentException();
}
for (Values dp : d) {
double uset = dp.getValue(parnames[0]).doubleValue();
double uset = dp.getValue(NumassPoint.HV_KEY).doubleValue();
if (!points.containsKey(uset)) {
points.put(uset, new ArrayList<>());
}
@@ -53,6 +53,8 @@ public class LossCalculator {
private final Map<Integer, UnivariateFunction> cache = new HashMap<>();



private LossCalculator() {
cache.put(1, getSingleScatterFunction());
// cache.put(2, getDoubleScatterFunction());
@@ -386,7 +388,7 @@ public class LossCalculator {
return integrator.integrate(integrand, 5d, margin);
};

return FunctionCaching.cacheUnivariateFunction(res, 0, margin, 200);
return FunctionCaching.cacheUnivariateFunction(0, margin, 200, res);

}
@@ -13,13 +13,13 @@ import hep.dataforge.values.Values;
import inr.numass.models.LossCalculator;
import inr.numass.utils.ExpressionUtils;
import org.apache.commons.math3.analysis.BivariateFunction;
import org.apache.commons.math3.analysis.UnivariateFunction;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.Map;

/**
*
* @author <a href="mailto:altavir@gmail.com">Alexander Nozik</a>
*/
public class NumassTransmission extends AbstractParametricBiFunction {
@@ -28,6 +28,7 @@ public class NumassTransmission extends AbstractParametricBiFunction {
private static final double ION_POTENTIAL = 15.4;//eV
private final LossCalculator calculator;
private final BivariateFunction trapFunc;
private Map<Double, UnivariateFunction> lossCache = new HashMap<>();

private final boolean adjustX;

@@ -93,6 +94,18 @@ public class NumassTransmission extends AbstractParametricBiFunction {
double X = getX(eIn, set);
// loss part
double loss = calculator.getTotalLossValue(X, eIn, eOut);
// double loss;
//
// if(eIn-eOut >= 300){
// loss = 0;
// } else {
// UnivariateFunction lossFunction = this.lossCache.computeIfAbsent(X, theX ->
// FunctionCaching.cacheUnivariateFunction(0, 300, 400, x -> calculator.getTotalLossValue(theX, eIn, eIn - x))
// );
//
// loss = lossFunction.value(eIn - eOut);
// }

//trapping part
double trap = getParameter("trap", set) * trapFunc.value(eIn, eOut);
return loss + trap;