A lot of minor fixes. Moved some code to Kotlin.

This commit is contained in:
Alexander Nozik 2017-12-13 14:57:54 +03:00
parent fc45576ccc
commit 4b323fd145
34 changed files with 898 additions and 600 deletions

View File

@ -5,6 +5,7 @@ plugins {
allprojects{
apply plugin: 'idea'
apply plugin: 'java'
apply plugin: "kotlin"
group = 'inr.numass'
version = '1.0.0'
@ -19,7 +20,12 @@ allprojects{
maven { url "https://dl.bintray.com/kotlin/kotlinx" }
}
apply plugin: "kotlin"
dependencies{
compile "org.jetbrains.kotlin:kotlin-stdlib-jre8:1.2.0"
compile "org.jetbrains.kotlin:kotlin-stdlib-jdk8:1.2.0"
}
compileKotlin {
kotlinOptions {

View File

@ -2,12 +2,6 @@ package inr.numass.data;
import hep.dataforge.meta.Meta;
import hep.dataforge.meta.MetaBuilder;
import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.Table;
import hep.dataforge.tables.TableFormat;
import hep.dataforge.tables.TableFormatBuilder;
import hep.dataforge.values.Value;
import hep.dataforge.values.Values;
import inr.numass.data.api.NumassPoint;
import inr.numass.data.api.NumassSet;
import inr.numass.data.api.SimpleNumassPoint;
@ -16,14 +10,9 @@ import org.jetbrains.annotations.NotNull;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static hep.dataforge.tables.Adapters.*;
import static inr.numass.data.api.NumassAnalyzer.*;
/**
* Created by darksnake on 30-Jan-17.
*/
@ -52,100 +41,6 @@ public class NumassDataUtils {
};
}
/**
 * Subtract reference spectrum.
 *
 * @param sp1 the spectrum to subtract from
 * @param sp2 the reference spectrum being subtracted
 * @return a new table with channel, count rate and count rate error columns
 */
public static Table subtractSpectrum(Table sp1, Table sp2) {
    TableFormat format = new TableFormatBuilder()
            .addNumber(CHANNEL_KEY, X_VALUE_KEY)
            .addNumber(COUNT_RATE_KEY, Y_VALUE_KEY)
            .addNumber(COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
            .build();

    //indexing table elements by channel for O(1) lookup of matching reference rows
    Map<Double, Values> t1 = sp1.getRows().collect(Collectors.toMap(row -> row.getDouble(CHANNEL_KEY), row -> row));
    Map<Double, Values> t2 = sp2.getRows().collect(Collectors.toMap(row -> row.getDouble(CHANNEL_KEY), row -> row));

    ListTable.Builder builder = new ListTable.Builder(format);
    t1.forEach((channel, row1) -> {
        Values row2 = t2.get(channel);
        if (row2 == null) {
            //no matching reference channel: keep the original row unchanged
            builder.row(row1);
        } else {
            //count rate difference is clamped at zero; errors are combined in quadrature
            double value = Math.max(row1.getDouble(COUNT_RATE_KEY) - row2.getDouble(COUNT_RATE_KEY), 0);
            double error1 = row1.getDouble(COUNT_RATE_ERROR_KEY);
            double error2 = row2.getDouble(COUNT_RATE_ERROR_KEY);
            double error = Math.sqrt(error1 * error1 + error2 * error2);
            builder.row(channel, value, error);
        }
    });
    return builder.build();
}
/**
 * Apply window and binning to a spectrum. Empty bins are filled with zeroes
 *
 * @param spectrum the input amplitude spectrum
 * @param binSize the width of one bin in channels
 * @param loChannel lower channel bound; autodefined if negative
 * @param upChannel upper channel bound; autodefined if negative
 * @return the binned spectrum table, one row per bin
 */
public static Table spectrumWithBinning(Table spectrum, int binSize, int loChannel, int upChannel) {
    TableFormat format = new TableFormatBuilder()
            .addNumber(CHANNEL_KEY, X_VALUE_KEY)
            .addNumber(COUNT_KEY, Y_VALUE_KEY)
            .addNumber(COUNT_RATE_KEY)
            .addNumber(COUNT_RATE_ERROR_KEY)
            .addNumber("binSize");
    ListTable.Builder builder = new ListTable.Builder(format);
    if (loChannel < 0) {
        //autodetect lower bound from the smallest channel present in the spectrum
        loChannel = spectrum.getColumn(CHANNEL_KEY).stream().mapToInt(Value::intValue).min().orElse(0);
    }
    if (upChannel < 0) {
        //autodetect upper bound from the largest channel present in the spectrum
        upChannel = spectrum.getColumn(CHANNEL_KEY).stream().mapToInt(Value::intValue).max().orElse(1);
    }
    for (int chan = loChannel; chan < upChannel - binSize; chan += binSize) {
        //atomics are used only because lambda captures must be effectively final; accumulation is sequential
        AtomicLong count = new AtomicLong(0);
        AtomicReference<Double> countRate = new AtomicReference<>(0d);
        AtomicReference<Double> countRateDispersion = new AtomicReference<>(0d);

        int binLo = chan;
        int binUp = chan + binSize;

        //accumulate rows whose channel falls in the half-open bin [binLo, binUp)
        spectrum.getRows().filter(row -> {
            int c = row.getInt(CHANNEL_KEY);
            return c >= binLo && c < binUp;
        }).forEach(row -> {
            count.addAndGet(row.getValue(COUNT_KEY, 0).longValue());
            countRate.accumulateAndGet(row.getDouble(COUNT_RATE_KEY, 0), (d1, d2) -> d1 + d2);
            //errors are summed as dispersions (squares) and converted back via sqrt below
            countRateDispersion.accumulateAndGet(Math.pow(row.getDouble(COUNT_RATE_ERROR_KEY, 0),2), (d1, d2) -> d1 + d2);
        });
        int bin = Math.min(binSize, upChannel - chan);
        //the output row is centered on the middle of the bin
        builder.row((double) chan + (double) bin / 2d, count.get(), countRate.get(), Math.sqrt(countRateDispersion.get()), bin);
    }
    return builder.build();
}
/**
 * The same as above, but with auto definition for borders
 *
 * @param spectrum the input amplitude spectrum
 * @param binSize the width of one bin in channels
 * @return the binned spectrum table
 */
public static Table spectrumWithBinning(Table spectrum, int binSize) {
    //negative bounds trigger auto detection of the channel range from the spectrum itself
    return spectrumWithBinning(spectrum, binSize, -1, -1);
}
@NotNull
public static SpectrumAdapter adapter() {
return new SpectrumAdapter("Uset", "CR", "CRerr", "Time");

View File

@ -1,100 +0,0 @@
package inr.numass.data.analyzers;
import hep.dataforge.meta.Meta;
import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.Table;
import hep.dataforge.tables.TableFormat;
import hep.dataforge.tables.TableFormatBuilder;
import inr.numass.data.api.*;
import org.jetbrains.annotations.Nullable;
import java.util.Comparator;
import java.util.stream.Stream;
import static hep.dataforge.tables.Adapters.*;
import static inr.numass.data.api.NumassPoint.HV_KEY;
/**
* Created by darksnake on 11.07.2017.
*/
/**
 * Base implementation for Numass event analyzers, holding an optional signal
 * processor used to extract events from raw frames.
 * Created by darksnake on 11.07.2017.
 */
public abstract class AbstractAnalyzer implements NumassAnalyzer {
    public static final String WINDOW_KEY = "window";
    public static final String TIME_KEY = "timestamp";
    //column order matches the argument order used by implementations when building ValueMap results
    public static final String[] NAME_LIST = {LENGTH_KEY, COUNT_KEY, COUNT_RATE_KEY, COUNT_RATE_ERROR_KEY, WINDOW_KEY, TIME_KEY};
//    public static String[] NAME_LIST_WITH_HV = {HV_KEY, LENGTH_KEY, COUNT_KEY, COUNT_RATE_KEY, COUNT_RATE_ERROR_KEY, WINDOW_KEY, TIME_KEY};

    //optional processor used to convert raw frames into events; null when only pre-extracted events are expected
    @Nullable
    private final SignalProcessor processor;

    public AbstractAnalyzer(@Nullable SignalProcessor processor) {
        this.processor = processor;
    }

    public AbstractAnalyzer() {
        this.processor = null;
    }

    /**
     * Return unsorted stream of events including events from frames.
     * In theory, events after processing could be unsorted due to mixture of frames and events.
     * In practice usually block have either frame or events, but not both.
     *
     * @param block the data block to read events from
     * @param config meta with optional "window.lo"/"window.up" channel bounds and a "sort" flag
     * @return filtered (and optionally time-sorted) event stream
     */
    public Stream<NumassEvent> getEvents(NumassBlock block, Meta config) {
        int loChannel = config.getInt("window.lo", 0);
        int upChannel = config.getInt("window.up", Integer.MAX_VALUE);
        //channel window is half-open: [loChannel, upChannel)
        Stream<NumassEvent> res = getAllEvents(block).filter(event -> {
            short channel = event.getChanel();
            return channel >= loChannel && channel < upChannel;
        });
        if (config.getBoolean("sort", false)) {
            res = res.sorted(Comparator.comparing(NumassEvent::getTimeOffset));
        }
        return res;
    }

    protected Stream<NumassEvent> getAllEvents(NumassBlock block) {
        if (block.getFrames().count() == 0) {
            return block.getEvents();
        } else if (getProcessor() == null) {
            //frames cannot be interpreted without a signal processor
            throw new IllegalArgumentException("Signal processor needed to analyze frames");
        } else {
            return Stream.concat(block.getEvents(), block.getFrames().flatMap(getProcessor()::analyze));
        }
    }

    /**
     * Get table format for summary table
     *
     * @param config analysis configuration (unused by this base implementation)
     * @return the default summary-table format
     */
    protected TableFormat getTableFormat(Meta config) {
        return new TableFormatBuilder()
                .addNumber(HV_KEY, X_VALUE_KEY)
                .addNumber(LENGTH_KEY)
                .addNumber(COUNT_KEY)
                .addNumber(COUNT_RATE_KEY, Y_VALUE_KEY)
                .addNumber(COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
                .addColumn(WINDOW_KEY)
                .addTime()
                .build();
    }

    @Override
    public Table analyzeSet(NumassSet set, Meta config) {
        TableFormat format = getTableFormat(config);
        //one summary row per point of the set
        return new ListTable.Builder(format)
                .rows(set.getPoints().map(point -> analyzePoint(point, config)))
                .build();
    }

    @Nullable
    public SignalProcessor getProcessor() {
        return processor;
    }
}

View File

@ -1,25 +0,0 @@
package inr.numass.data.analyzers;
import hep.dataforge.meta.Meta;
import hep.dataforge.values.Values;
import inr.numass.data.api.NumassBlock;
import inr.numass.data.api.SignalProcessor;
import org.jetbrains.annotations.Nullable;
/**
* Block analyzer that can perform debunching
* Created by darksnake on 11.07.2017.
*/
/**
 * Block analyzer that can perform debunching
 * Created by darksnake on 11.07.2017.
 */
public class DebunchAnalyzer extends AbstractAnalyzer {

    public DebunchAnalyzer(@Nullable SignalProcessor processor) {
        super(processor);
    }

    public DebunchAnalyzer() {
    }

    //Debunching is not implemented yet; any call fails fast
    @Override
    public Values analyze(NumassBlock block, Meta config) {
        throw new UnsupportedOperationException("TODO");
    }
}

View File

@ -1,43 +0,0 @@
package inr.numass.data.analyzers;
import hep.dataforge.meta.Meta;
import hep.dataforge.tables.ValueMap;
import hep.dataforge.values.Values;
import inr.numass.data.api.NumassBlock;
import inr.numass.data.api.SignalProcessor;
import org.jetbrains.annotations.Nullable;
/**
* A simple event counter
* Created by darksnake on 07.07.2017.
*/
/**
 * A simple event counter: the count rate is the raw event count divided by the
 * block length, with a Poissonian error estimate.
 * Created by darksnake on 07.07.2017.
 */
public class SimpleAnalyzer extends AbstractAnalyzer {

    public SimpleAnalyzer(@Nullable SignalProcessor processor) {
        super(processor);
    }

    public SimpleAnalyzer() {
    }

    @Override
    public Values analyze(NumassBlock block, Meta config) {
        final int windowLo = config.getInt("window.lo", 0);
        final int windowUp = config.getInt("window.up", Integer.MAX_VALUE);

        final long eventCount = getEvents(block, config).count();
        //block length in seconds
        final double seconds = (double) block.getLength().toNanos() / 1e9;
        final double rate = (double) eventCount / seconds;
        //Poissonian error: sqrt(N)/T
        final double rateError = Math.sqrt((double) eventCount) / seconds;

        return ValueMap.of(NAME_LIST,
                seconds,
                eventCount,
                rate,
                rateError,
                new Integer[]{windowLo, windowUp},
                block.getStartTime());
    }
}

View File

@ -1,77 +0,0 @@
package inr.numass.data.analyzers;
import hep.dataforge.meta.Meta;
import hep.dataforge.tables.TableFormat;
import hep.dataforge.tables.ValueMap;
import hep.dataforge.values.Value;
import hep.dataforge.values.Values;
import inr.numass.data.api.NumassAnalyzer;
import inr.numass.data.api.NumassBlock;
import inr.numass.data.api.NumassEvent;
import inr.numass.data.api.SignalProcessor;
import org.jetbrains.annotations.Nullable;
import java.util.Map;
import java.util.stream.Stream;
/**
* An analyzer dispatcher which uses different analyzer for different meta
* Created by darksnake on 11.07.2017.
*/
/**
 * An analyzer dispatcher which uses different analyzer for different meta
 * Created by darksnake on 11.07.2017.
 */
public class SmartAnalyzer extends AbstractAnalyzer {
    //delegates are final and initialized exactly once (the original initialized them twice:
    //inline and again in the constructor)
    private final SimpleAnalyzer simpleAnalyzer;
    private final DebunchAnalyzer debunchAnalyzer;
    private final TimeAnalyzer timeAnalyzer;

    public SmartAnalyzer(@Nullable SignalProcessor processor) {
        super(processor);
        this.simpleAnalyzer = new SimpleAnalyzer(processor);
        this.debunchAnalyzer = new DebunchAnalyzer(processor);
        this.timeAnalyzer = new TimeAnalyzer(processor);
    }

    public SmartAnalyzer() {
        //same effect as the delegate no-arg constructors: all processors are null
        this(null);
    }

    /**
     * Select the delegate analyzer: an explicit "type" value wins; otherwise the
     * presence of a "t0" cut implies time analysis, else simple counting.
     */
    private NumassAnalyzer getAnalyzer(Meta config) {
        if (config.hasValue("type")) {
            switch (config.getString("type")) {
                case "simple":
                    return simpleAnalyzer;
                case "time":
                    return timeAnalyzer;
                case "debunch":
                    return debunchAnalyzer;
                default:
                    throw new IllegalArgumentException("Analyzer not found");
            }
        } else {
            if (config.hasValue("t0") || config.hasMeta("t0")) {
                return timeAnalyzer;
            } else {
                return simpleAnalyzer;
            }
        }
    }

    @Override
    public Values analyze(NumassBlock block, Meta config) {
        NumassAnalyzer analyzer = getAnalyzer(config);
        Map<String, Value> map = analyzer.analyze(block, config).asMap();
        //NOTE(review): assumes asMap() returns a mutable map — confirm; putIfAbsent would fail on an unmodifiable view
        map.putIfAbsent(TimeAnalyzer.T0_KEY, Value.of(0d));
        return new ValueMap(map);
    }

    @Override
    public Stream<NumassEvent> getEvents(NumassBlock block, Meta config) {
        return getAnalyzer(config).getEvents(block, config);
    }

    @Override
    protected TableFormat getTableFormat(Meta config) {
        //time analysis adds a t0 column to the summary table
        if (config.hasValue(TimeAnalyzer.T0_KEY) || config.hasMeta(TimeAnalyzer.T0_KEY)) {
            return timeAnalyzer.getTableFormat(config);
        }
        return super.getTableFormat(config);
    }
}

View File

@ -1,209 +0,0 @@
package inr.numass.data.analyzers;
import hep.dataforge.description.ValueDef;
import hep.dataforge.meta.Meta;
import hep.dataforge.tables.TableFormat;
import hep.dataforge.tables.TableFormatBuilder;
import hep.dataforge.tables.ValueMap;
import hep.dataforge.values.Value;
import hep.dataforge.values.ValueType;
import hep.dataforge.values.Values;
import inr.numass.data.api.NumassBlock;
import inr.numass.data.api.NumassEvent;
import inr.numass.data.api.NumassPoint;
import inr.numass.data.api.SignalProcessor;
import javafx.util.Pair;
import org.jetbrains.annotations.Nullable;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Stream;
import static hep.dataforge.tables.Adapters.*;
import static inr.numass.data.api.NumassPoint.HV_KEY;
/**
* An analyzer which uses time information from events
* Created by darksnake on 11.07.2017.
*/
/**
 * An analyzer which uses time information from events
 * Created by darksnake on 11.07.2017.
 */
public class TimeAnalyzer extends AbstractAnalyzer {
    //made final: a mutable public static "constant" could be silently reassigned by any caller
    public static final String T0_KEY = "t0";

    public static final String[] NAME_LIST = {LENGTH_KEY, COUNT_KEY, COUNT_RATE_KEY, COUNT_RATE_ERROR_KEY, WINDOW_KEY, TIME_KEY, T0_KEY};
//    public static String[] NAME_LIST_WITH_HV = {HV_KEY, LENGTH_KEY, COUNT_KEY, COUNT_RATE_KEY, COUNT_RATE_ERROR_KEY, WINDOW_KEY, TIME_KEY, T0_KEY};

    public TimeAnalyzer(@Nullable SignalProcessor processor) {
        super(processor);
    }

    public TimeAnalyzer() {
    }

    /**
     * Analyze a block using inter-event delays: events closer than t0 nanos to the
     * previous one are discarded and the count rate is corrected for the resulting dead time.
     */
    @Override
    public Values analyze(NumassBlock block, Meta config) {
        //In case points inside points
        if (block instanceof NumassPoint) {
            return analyzePoint((NumassPoint) block, config);
        }
        int loChannel = config.getInt("window.lo", 0);
        int upChannel = config.getInt("window.up", Integer.MAX_VALUE);
        long t0 = getT0(block, config);

        AtomicLong totalN = new AtomicLong(0);
        AtomicLong totalT = new AtomicLong(0);

        getEventsWithDelay(block, config)
                .filter(pair -> pair.getValue() >= t0)
                .forEach(pair -> {
                    totalN.incrementAndGet();
                    //TODO add progress listener here
                    totalT.addAndGet(pair.getValue());
                });

        //count rate corrected for the t0 cut applied to each surviving delay (delays accumulated in nanos)
        double countRate = 1e6 * totalN.get() / (totalT.get() / 1000 - t0 * totalN.get() / 1000);//1e9 / (totalT.get() / totalN.get() - t0);
        double countRateError = countRate / Math.sqrt(totalN.get());
        double length = totalT.get() / 1e9;
        long count = (long) (length * countRate);

        return ValueMap.of(NAME_LIST,
                length,
                count,
                countRate,
                countRateError,
                new Integer[]{loChannel, upChannel},
                block.getStartTime(),
                (double) t0 / 1000d
        );
    }

    @Override
    public Values analyzePoint(NumassPoint point, Meta config) {
        //Average count rates, do not sum events
        Values res = point.getBlocks()
                .map(it -> analyze(it, config))
                .reduce(null, this::combineBlockResults);

        Map<String, Value> map = new HashMap<>(res.asMap());
        map.put(HV_KEY, Value.of(point.getVoltage()));
        return new ValueMap(map);
    }

    /**
     * Combine two blocks from the same point into one
     *
     * @param v1 accumulated result (may be null on the first reduction step)
     * @param v2 next block result (may be null)
     * @return combined result with length-weighted count rate
     */
    private Values combineBlockResults(Values v1, Values v2) {
        if (v1 == null) {
            return v2;
        }
        if (v2 == null) {
            return v1;
        }

        double t1 = v1.getDouble(LENGTH_KEY);
        double t2 = v2.getDouble(LENGTH_KEY);
        double cr1 = v1.getDouble(COUNT_RATE_KEY);
        double cr2 = v2.getDouble(COUNT_RATE_KEY);
        double err1 = v1.getDouble(COUNT_RATE_ERROR_KEY);
        double err2 = v2.getDouble(COUNT_RATE_ERROR_KEY);

        //count rate is averaged with weights proportional to block lengths; errors combined in quadrature
        double countRate = (t1 * cr1 + t2 * cr2) / (t1 + t2);
        double countRateErr = Math.sqrt(Math.pow(t1 * err1 / (t1 + t2), 2) + Math.pow(t2 * err2 / (t1 + t2), 2));

        return ValueMap.of(NAME_LIST,
                v1.getDouble(LENGTH_KEY) + v2.getDouble(LENGTH_KEY),
                v1.getInt(COUNT_KEY) + v2.getInt(COUNT_KEY),
                countRate,
                countRateErr,
                v1.getValue(WINDOW_KEY),
                v1.getValue(TIME_KEY),
                v1.getDouble(T0_KEY)
        );
    }

    @ValueDef(name = "t0", type = ValueType.NUMBER, info = "Constant t0 cut")
    @ValueDef(name = "t0.crFraction", type = ValueType.NUMBER, info = "The relative fraction of events that should be removed by time cut")
    @ValueDef(name = "t0.min", type = ValueType.NUMBER, def = "0", info = "Minimal t0")
    private int getT0(NumassBlock block, Meta meta) {
        if (meta.hasValue("t0")) {
            return meta.getInt("t0");
        } else if (meta.hasMeta("t0")) {
            double fraction = meta.getDouble("t0.crFraction");
            double cr = estimateCountRate(block);
            //NOTE(review): reads "t0.minCR", which is not declared in the @ValueDef annotations above — confirm the intended key
            if (cr < meta.getDouble("t0.minCR", 0)) {
                return 0;
            } else {
                //t0 chosen so that the given fraction of an exponential delay distribution is cut, bounded below by t0.min
                return (int) Math.max(-1e9 / cr * Math.log(1d - fraction), meta.getDouble("t0.min", 0));
            }
        } else {
            return 0;
        }
    }

    private double estimateCountRate(NumassBlock block) {
        return (double) block.getEvents().count() / block.getLength().toMillis() * 1000;
    }

    /**
     * The chain of event times in nanos
     *
     * @param block the data block
     * @param config analysis configuration
     * @return pairs of (event, delay since previous event in nanos); the first event gets delay 0
     */
    public Stream<Pair<NumassEvent, Long>> getEventsWithDelay(NumassBlock block, Meta config) {
        AtomicReference<NumassEvent> lastEvent = new AtomicReference<>(null);

        Stream<NumassEvent> eventStream = super.getEvents(block, config);//using super implementation

        return eventStream.map(event -> {
            long res = lastEvent.get() == null ? 0L : event.getTimeOffset() - lastEvent.get().getTimeOffset();
            //negative delays (out-of-order events) are clamped to zero
            if (res < 0) {
                res = 0L;
            }
            lastEvent.set(event);
            //TODO remove autoboxing somehow
            return new Pair<>(event, res);
        });
    }

    /**
     * The filtered stream of events
     *
     * @param block the data block
     * @param config analysis configuration
     * @return events whose delay from the previous event is at least t0
     */
    @Override
    public Stream<NumassEvent> getEvents(NumassBlock block, Meta config) {
        long t0 = getT0(block, config);
        return getEventsWithDelay(block, config).filter(pair -> pair.getValue() >= t0).map(Pair::getKey);
    }

    @Override
    protected TableFormat getTableFormat(Meta config) {
        //same as the base format plus a t0 column
        return new TableFormatBuilder()
                .addNumber(HV_KEY, X_VALUE_KEY)
                .addNumber(LENGTH_KEY)
                .addNumber(COUNT_KEY)
                .addNumber(COUNT_RATE_KEY, Y_VALUE_KEY)
                .addNumber(COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
                .addColumn(WINDOW_KEY)
                .addTime()
                .addNumber(T0_KEY)
                .build();
    }
}

View File

@ -0,0 +1,104 @@
/*
* Copyright 2017 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.data.analyzers
import hep.dataforge.meta.Meta
import hep.dataforge.tables.Adapters.*
import hep.dataforge.tables.ListTable
import hep.dataforge.tables.Table
import hep.dataforge.tables.TableFormat
import hep.dataforge.tables.TableFormatBuilder
import inr.numass.data.api.NumassBlock
import inr.numass.data.api.NumassEvent
import inr.numass.data.api.NumassPoint.HV_KEY
import inr.numass.data.api.NumassSet
import inr.numass.data.api.SignalProcessor
import java.lang.IllegalArgumentException
import java.util.*
import java.util.stream.Stream
/**
* Created by darksnake on 11.07.2017.
*/
/**
 * Base implementation for Numass event analyzers; the optional [processor] is used
 * to extract events from raw frames.
 * Created by darksnake on 11.07.2017.
 */
abstract class AbstractAnalyzer @JvmOverloads constructor(private val processor: SignalProcessor? = null) : NumassAnalyzer {

    /**
     * Return unsorted stream of events including events from frames.
     * In theory, events after processing could be unsorted due to mixture of frames and events.
     * In practice usually block have either frame or events, but not both.
     *
     * @param block the data block
     * @param meta optional "window.lo"/"window.up" channel bounds and a "sort" flag
     * @return filtered (and optionally time-sorted) event stream
     */
    override fun getEvents(block: NumassBlock, meta: Meta): Stream<NumassEvent> {
        val loChannel = meta.getInt("window.lo", 0)
        val upChannel = meta.getInt("window.up", Integer.MAX_VALUE)
        //half-open window [loChannel, upChannel): `until` replaces the error-prone `..(upChannel - 1)` form
        var res = getAllEvents(block).filter { event ->
            event.chanel.toInt() in loChannel until upChannel
        }
        if (meta.getBoolean("sort", false)) {
            res = res.sorted(Comparator.comparing<NumassEvent, Long> { it.timeOffset })
        }
        return res
    }

    protected fun getAllEvents(block: NumassBlock): Stream<NumassEvent> {
        return when {
            block.frames.count() == 0L -> block.events
            //frames cannot be interpreted without a signal processor
            processor == null -> throw IllegalArgumentException("Signal processor needed to analyze frames")
            else -> Stream.concat(block.events, block.frames.flatMap { processor.analyze(it) })
        }
    }

    /**
     * Get table format for summary table
     *
     * @param config analysis configuration (unused by this base implementation)
     * @return the default summary-table format
     */
    protected open fun getTableFormat(config: Meta): TableFormat {
        return TableFormatBuilder()
                .addNumber(HV_KEY, X_VALUE_KEY)
                .addNumber(NumassAnalyzer.LENGTH_KEY)
                .addNumber(NumassAnalyzer.COUNT_KEY)
                .addNumber(NumassAnalyzer.COUNT_RATE_KEY, Y_VALUE_KEY)
                .addNumber(NumassAnalyzer.COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
                .addColumn(NumassAnalyzer.WINDOW_KEY)
                .addTime()
                .build()
    }

    override fun analyzeSet(set: NumassSet, config: Meta): Table {
        val format = getTableFormat(config)
        //one summary row per point of the set
        return ListTable.Builder(format)
                .rows(set.points.map { point -> analyzePoint(point, config) })
                .build()
    }

    companion object {
        //column order matches the argument order used by implementations when building ValueMap results
        val NAME_LIST = arrayOf(
                NumassAnalyzer.LENGTH_KEY,
                NumassAnalyzer.COUNT_KEY,
                NumassAnalyzer.COUNT_RATE_KEY,
                NumassAnalyzer.COUNT_RATE_ERROR_KEY,
                NumassAnalyzer.WINDOW_KEY,
                NumassAnalyzer.TIME_KEY
        )
    }
}

View File

@ -0,0 +1,33 @@
/*
* Copyright 2017 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.data.analyzers
import hep.dataforge.meta.Meta
import hep.dataforge.values.Values
import inr.numass.data.api.NumassBlock
import inr.numass.data.api.SignalProcessor
/**
* Block analyzer that can perform debunching
* Created by darksnake on 11.07.2017.
*/
/**
 * Block analyzer that can perform debunching
 * Created by darksnake on 11.07.2017.
 */
class DebunchAnalyzer @JvmOverloads constructor(private val processor: SignalProcessor? = null) : AbstractAnalyzer(processor) {
    //Debunching is not implemented yet: TODO() throws NotImplementedError on any call
    override fun analyze(block: NumassBlock, config: Meta): Values {
        TODO()
    }
}

View File

@ -0,0 +1,262 @@
/*
* Copyright 2017 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.data.analyzers
import hep.dataforge.meta.Meta
import hep.dataforge.tables.*
import hep.dataforge.tables.Adapters.*
import hep.dataforge.values.Value
import hep.dataforge.values.Values
import inr.numass.data.api.NumassBlock
import inr.numass.data.api.NumassEvent
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassPoint.HV_KEY
import inr.numass.data.api.NumassSet
import java.util.*
import java.util.concurrent.atomic.AtomicLong
import java.util.concurrent.atomic.AtomicReference
import java.util.stream.IntStream
import java.util.stream.Stream
import kotlin.streams.asSequence
/**
* A general raw data analysis utility. Could have different implementations
* Created by darksnake on 06-Jul-17.
*/
/**
 * A general raw data analysis utility. Could have different implementations
 * Created by darksnake on 06-Jul-17.
 */
interface NumassAnalyzer {

    /**
     * Perform analysis on block. The values for count rate, its error and point length in nanos must
     * exist, but occasionally additional values could also be presented.
     *
     * @param block the data block to analyze
     * @param config analysis configuration
     * @return analysis result row
     */
    fun analyze(block: NumassBlock, config: Meta = Meta.empty()): Values

    /**
     * Analysis result for point including hv information
     *
     * @param point the point to analyze
     * @param config analysis configuration
     * @return block result augmented with the point voltage
     */
    fun analyzePoint(point: NumassPoint, config: Meta = Meta.empty()): Values {
        val map = HashMap(analyze(point, config).asMap())
        map[HV_KEY] = Value.of(point.voltage)
        return ValueMap(map)
    }

    /**
     * Return unsorted stream of events including events from frames
     *
     * @param block the data block
     * @param meta analysis configuration
     * @return event stream
     */
    fun getEvents(block: NumassBlock, meta: Meta = Meta.empty()): Stream<NumassEvent>

    /**
     * Analyze the whole set. And return results as a table
     *
     * @param set the point set
     * @param config analysis configuration
     * @return one summary row per point
     */
    fun analyzeSet(set: NumassSet, config: Meta): Table

    /**
     * Get the approximate number of events in block. Not all analyzers support precise event counting
     *
     * @param block the data block
     * @param config analysis configuration
     * @return approximate event count
     */
    fun getCount(block: NumassBlock, config: Meta): Long {
        return analyze(block, config).getValue(COUNT_KEY).numberValue().toLong()
    }

    /**
     * Get approximate effective point length in nanos. It is not necessary corresponds to real point length.
     *
     * @param block the data block
     * @param config analysis configuration
     * @return effective length in nanos
     */
    fun getLength(block: NumassBlock, config: Meta): Long {
        return analyze(block, config).getValue(LENGTH_KEY).numberValue().toLong()
    }

    fun getSpectrum(block: NumassBlock, config: Meta): Table {
        //block length is reported in millis; spectrum normalization expects seconds
        val seconds = block.length.toMillis().toDouble() / 1000.0
        return getSpectrum(seconds, getEvents(block, config).asSequence(), config)
    }

    companion object {
        const val CHANNEL_KEY = "channel"
        const val COUNT_KEY = "count"
        const val LENGTH_KEY = "length"
        const val COUNT_RATE_KEY = "cr"
        const val COUNT_RATE_ERROR_KEY = "crErr"

        const val WINDOW_KEY = "window"
        const val TIME_KEY = "timestamp"

        val DEFAULT_ANALYZER: NumassAnalyzer = SmartAnalyzer()

        val ADAPTER: ValuesAdapter = Adapters.buildXYAdapter(CHANNEL_KEY, COUNT_RATE_KEY)

//        val MAX_CHANNEL = 10000

        /**
         * Extension shortcut for [spectrumWithBinning].
         */
        fun Table.withBinning(binSize: Int, loChannel: Int? = null, upChannel: Int? = null): Table {
            return spectrumWithBinning(this, binSize, loChannel, upChannel)
        }

        /**
         * Subtract reference spectrum.
         *
         * @param sp1 the spectrum to subtract from
         * @param sp2 the reference spectrum being subtracted
         * @return a new table with channel, count rate and count rate error columns
         */
        fun subtractSpectrum(sp1: Table, sp2: Table): Table {
            val format = TableFormatBuilder()
                    .addNumber(CHANNEL_KEY, X_VALUE_KEY)
                    .addNumber(COUNT_RATE_KEY, Y_VALUE_KEY)
                    .addNumber(COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
                    .build()

            //Index the reference spectrum by channel once: O(n) instead of the per-row linear
            //search the previous version did (O(n^2)). Assumes channels are unique in sp2.
            val reference = sp2.rows.asSequence().associateBy { it.getDouble(CHANNEL_KEY) }

            val builder = ListTable.Builder(format)
            sp1.forEach { row1 ->
                val channel = row1.getDouble(CHANNEL_KEY)
                val row2 = reference[channel]
                if (row2 == null) {
                    //no matching reference channel: keep the original row unchanged
                    builder.row(row1)
                } else {
                    //count rate difference is clamped at zero; errors are combined in quadrature
                    val value = Math.max(row1.getDouble(COUNT_RATE_KEY) - row2.getDouble(COUNT_RATE_KEY), 0.0)
                    val error1 = row1.getDouble(COUNT_RATE_ERROR_KEY)
                    val error2 = row2.getDouble(COUNT_RATE_ERROR_KEY)
                    val error = Math.sqrt(error1 * error1 + error2 * error2)
                    builder.row(channel, value, error)
                }
            }
            return builder.build()
        }
    }
}
/**
* Calculate number of counts in the given channel
*
* @param spectrum
* @param loChannel
* @param upChannel
* @return
*/
/**
 * Calculate number of counts in the given channel window.
 *
 * @param spectrum amplitude spectrum table
 * @param loChannel lower channel bound (inclusive)
 * @param upChannel upper channel bound (exclusive)
 * @return total count over the window
 */
fun countInWindow(spectrum: Table, loChannel: Short, upChannel: Short): Long {
    //`until` expresses the half-open window directly instead of `..(upChannel - 1)`
    val window = loChannel until upChannel
    return spectrum.rows.filter { row ->
        row.getInt(NumassAnalyzer.CHANNEL_KEY) in window
    }.mapToLong { it.getValue(NumassAnalyzer.COUNT_KEY).numberValue().toLong() }.sum()
}
/**
* Calculate the amplitude spectrum for a given block. The s
*
* @param block
* @param config
* @return
*/
/**
 * Calculate the amplitude spectrum from a sequence of events: a histogram of counts
 * per channel, normalized to count rates by the given exposure time.
 *
 * @param length effective exposure time in seconds, used to normalize count rates
 * @param events the event sequence to histogram by channel
 * @param config optional "window.lo"/"window.up" values override the channel range
 * @return spectrum table with channel, count, count rate and count rate error columns
 */
fun getSpectrum(length: Double, events: Sequence<NumassEvent>, config: Meta = Meta.empty()): Table {
    val format = TableFormatBuilder()
            .addNumber(NumassAnalyzer.CHANNEL_KEY, X_VALUE_KEY)
            .addNumber(NumassAnalyzer.COUNT_KEY)
            .addNumber(NumassAnalyzer.COUNT_RATE_KEY, Y_VALUE_KEY)
            .addNumber(NumassAnalyzer.COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
            .updateMeta { metaBuilder -> metaBuilder.setNode("config", config) }
            .build()
    //optimized for fastest computation
    val spectrum: MutableMap<Int, AtomicLong> = HashMap()
    events.forEach { event ->
        val channel = event.chanel.toInt()
        spectrum.getOrPut(channel) {
            AtomicLong(0)
        }.incrementAndGet()
    }
    //NOTE(review): if no window is configured and the event sequence is empty, keys.min() is null — confirm getInt tolerates that
    val minChannel = config.getInt("window.lo") { spectrum.keys.min() }
    val maxChannel = config.getInt("window.up") { spectrum.keys.max() }
    //NOTE(review): IntStream.range excludes maxChannel, so the top channel is omitted — confirm this is intended
    return ListTable.Builder(format)
            .rows(IntStream.range(minChannel, maxChannel)
                    .mapToObj { i ->
                        //channels with no events are filled with zero counts
                        val value = spectrum[i]?.get() ?: 0
                        ValueMap.of(
                                format.namesAsArray(),
                                i,
                                value,
                                value.toDouble() / length,
                                Math.sqrt(value.toDouble()) / length
                        )
                    }
            ).build()
}
/**
* Apply window and binning to a spectrum. Empty bins are filled with zeroes
*
* @param binSize
* @param loChannel autodefined if negative
* @param upChannel autodefined if negative
* @return
*/
/**
 * Apply window and binning to a spectrum. Empty bins are filled with zeroes.
 *
 * @param spectrum the input amplitude spectrum
 * @param binSize the width of one bin in channels
 * @param loChannel lower channel bound; autodefined from the spectrum if null
 * @param upChannel upper channel bound; autodefined from the spectrum if null
 * @return the binned spectrum table, one row per bin
 */
@JvmOverloads
fun spectrumWithBinning(spectrum: Table, binSize: Int, loChannel: Int? = null, upChannel: Int? = null): Table {
    val format = TableFormatBuilder()
            .addNumber(NumassAnalyzer.CHANNEL_KEY, X_VALUE_KEY)
            .addNumber(NumassAnalyzer.COUNT_KEY, Y_VALUE_KEY)
            .addNumber(NumassAnalyzer.COUNT_RATE_KEY)
            .addNumber(NumassAnalyzer.COUNT_RATE_ERROR_KEY)
            .addNumber("binSize")
    val builder = ListTable.Builder(format)
    var chan = loChannel
            ?: spectrum.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.intValue() }.min().orElse(0)
    val top = upChannel
            ?: spectrum.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.intValue() }.max().orElse(1)
    while (chan < top - binSize) {
        //Kotlin lambdas can capture mutable locals, so the Java-style atomics are unnecessary
        //for this strictly sequential accumulation
        var count = 0L
        var countRate = 0.0
        var countRateDispersion = 0.0

        val binLo = chan
        val binUp = chan + binSize

        //accumulate rows whose channel falls in the half-open bin [binLo, binUp)
        spectrum.rows.filter { row ->
            row.getInt(NumassAnalyzer.CHANNEL_KEY) in binLo until binUp
        }.forEach { row ->
            count += row.getValue(NumassAnalyzer.COUNT_KEY, 0).longValue()
            countRate += row.getDouble(NumassAnalyzer.COUNT_RATE_KEY, 0.0)
            //errors are summed as dispersions (squares) and converted back via sqrt below
            countRateDispersion += Math.pow(row.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY, 0.0), 2.0)
        }
        val bin = Math.min(binSize, top - chan)
        //the output row is centered on the middle of the bin
        builder.row(chan.toDouble() + bin.toDouble() / 2.0, count, countRate, Math.sqrt(countRateDispersion), bin)
        chan += binSize
    }
    return builder.build()
}

View File

@ -0,0 +1,50 @@
/*
* Copyright 2017 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.data.analyzers
import hep.dataforge.meta.Meta
import hep.dataforge.tables.ValueMap
import hep.dataforge.values.Values
import inr.numass.data.api.NumassBlock
import inr.numass.data.api.SignalProcessor
/**
* A simple event counter
* Created by darksnake on 07.07.2017.
*/
/**
 * A simple event counter: the count rate is the raw event count divided by the
 * block length, with a Poissonian error estimate.
 * Created by darksnake on 07.07.2017.
 */
class SimpleAnalyzer @JvmOverloads constructor(private val processor: SignalProcessor? = null) : AbstractAnalyzer(processor) {

    override fun analyze(block: NumassBlock, config: Meta): Values {
        val windowLo = config.getInt("window.lo", 0)
        val windowUp = config.getInt("window.up", Integer.MAX_VALUE)

        val eventCount = getEvents(block, config).count()
        //block length in seconds
        val lengthSeconds = block.length.toNanos().toDouble() / 1e9
        val rate = eventCount.toDouble() / lengthSeconds
        //Poissonian error: sqrt(N)/T
        val rateError = Math.sqrt(eventCount.toDouble()) / lengthSeconds

        return ValueMap.of(AbstractAnalyzer.NAME_LIST,
                lengthSeconds,
                eventCount,
                rate,
                rateError,
                arrayOf(windowLo, windowUp),
                block.startTime)
    }
}

View File

@ -0,0 +1,72 @@
/*
* Copyright 2017 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.data.analyzers
import hep.dataforge.meta.Meta
import hep.dataforge.tables.TableFormat
import hep.dataforge.tables.ValueMap
import hep.dataforge.values.Value
import hep.dataforge.values.Values
import inr.numass.data.api.NumassBlock
import inr.numass.data.api.NumassEvent
import inr.numass.data.api.SignalProcessor
import java.util.stream.Stream
/**
* An analyzer dispatcher which uses different analyzer for different meta
* Created by darksnake on 11.07.2017.
*/
/**
 * An analyzer dispatcher which uses different analyzer for different meta
 * Created by darksnake on 11.07.2017.
 */
class SmartAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(processor) {
    private val simpleAnalyzer = SimpleAnalyzer(processor)
    private val debunchAnalyzer = DebunchAnalyzer(processor)
    private val timeAnalyzer = TimeAnalyzer(processor)

    //Selection order: an explicit "type" value wins; otherwise a "t0" cut implies
    //time analysis, else plain counting.
    private fun getAnalyzer(config: Meta): NumassAnalyzer = when {
        config.hasValue("type") -> when (config.getString("type")) {
            "simple" -> simpleAnalyzer
            "time" -> timeAnalyzer
            "debunch" -> debunchAnalyzer
            else -> throw IllegalArgumentException("Analyzer not found")
        }
        config.hasValue("t0") || config.hasMeta("t0") -> timeAnalyzer
        else -> simpleAnalyzer
    }

    override fun analyze(block: NumassBlock, config: Meta): Values {
        val result = getAnalyzer(config).analyze(block, config).asMap()
        //guarantee that a t0 column is always present in the result
        result.putIfAbsent(TimeAnalyzer.T0_KEY, Value.of(0.0))
        return ValueMap(result)
    }

    override fun getEvents(block: NumassBlock, config: Meta): Stream<NumassEvent> =
            getAnalyzer(config).getEvents(block, config)

    override fun getTableFormat(config: Meta): TableFormat = when {
        config.hasValue(TimeAnalyzer.T0_KEY) || config.hasMeta(TimeAnalyzer.T0_KEY) ->
            timeAnalyzer.getTableFormat(config)
        else -> super.getTableFormat(config)
    }
}

View File

@ -0,0 +1,231 @@
/*
* Copyright 2017 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.data.analyzers
import hep.dataforge.description.ValueDef
import hep.dataforge.description.ValueDefs
import hep.dataforge.meta.Meta
import hep.dataforge.tables.Adapters.*
import hep.dataforge.tables.TableFormat
import hep.dataforge.tables.TableFormatBuilder
import hep.dataforge.tables.ValueMap
import hep.dataforge.values.Value
import hep.dataforge.values.ValueType
import hep.dataforge.values.Values
import inr.numass.data.api.NumassBlock
import inr.numass.data.api.NumassEvent
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassPoint.HV_KEY
import inr.numass.data.api.SignalProcessor
import java.util.*
import java.util.concurrent.atomic.AtomicLong
import java.util.stream.Stream
import kotlin.streams.asSequence
import kotlin.streams.asStream
/**
* An analyzer which uses time information from events
* Created by darksnake on 11.07.2017.
*/
class TimeAnalyzer @JvmOverloads constructor(private val processor: SignalProcessor? = null) : AbstractAnalyzer(processor) {

    /**
     * Analyze a block using inter-event delays: events whose delay since the
     * previous event is below t0 are discarded and the accumulated delay time
     * is used as the effective measurement length, correcting for the cut.
     */
    override fun analyze(block: NumassBlock, config: Meta): Values {
        //In case points inside points
        if (block is NumassPoint) {
            return analyzePoint(block, config)
        }
        // window bounds are only echoed into the result row; event filtering
        // is performed by getEvents/getEventsWithDelay
        val loChannel = config.getInt("window.lo", 0)
        val upChannel = config.getInt("window.up", Integer.MAX_VALUE)
        // t0 cut in nanoseconds (getT0 returns nanos as Int — see getT0)
        val t0 = getT0(block, config).toLong()
        val totalN = AtomicLong(0)
        val totalT = AtomicLong(0)
        getEventsWithDelay(block, config)
                .filter { pair -> pair.second >= t0 }
                .forEach { pair ->
                    totalN.incrementAndGet()
                    //TODO add progress listener here
                    totalT.addAndGet(pair.second)
                }
        // dead-time-corrected rate, equivalent to 1e9 * N / (T - t0 * N) with T in nanos;
        // the /1000 terms keep the intermediate arithmetic in microseconds
        val countRate = 1e6 * totalN.get() / (totalT.get() / 1000 - t0 * totalN.get() / 1000)//1e9 / (totalT.get() / totalN.get() - t0);
        val countRateError = countRate / Math.sqrt(totalN.get().toDouble())
        // effective length in seconds (totalT is accumulated delays in nanos)
        val length = totalT.get() / 1e9
        // count reconstructed from corrected rate, not the raw accepted-event count
        val count = (length * countRate).toLong()
        return ValueMap.of(NAME_LIST,
                length,
                count,
                countRate,
                countRateError,
                arrayOf(loChannel, upChannel),
                block.startTime,
                t0.toDouble() / 1000.0
        )
    }

    /**
     * Analyze a multi-block point by analyzing each block separately and
     * combining the per-block results (time-weighted average of count rates),
     * then attaching the point HV value.
     */
    override fun analyzePoint(point: NumassPoint, config: Meta): Values {
        //Average count rates, do not sum events
        val res = point.blocks
                .map { it -> analyze(it, config) }
                .reduce(null) { v1, v2 -> this.combineBlockResults(v1, v2) }
        val map = HashMap(res.asMap())
        map.put(HV_KEY, Value.of(point.voltage))
        return ValueMap(map)
    }

    /**
     * Combine two blocks from the same point into one
     *
     * Count rates are averaged weighted by block length; errors are combined
     * in quadrature with the same weights. Window, time and t0 are taken from
     * the first block. Null-tolerant so it can serve as a reduce accumulator.
     *
     * @param v1
     * @param v2
     * @return
     */
    private fun combineBlockResults(v1: Values?, v2: Values?): Values? {
        if (v1 == null) {
            return v2
        }
        if (v2 == null) {
            return v1
        }
        val t1 = v1.getDouble(NumassAnalyzer.LENGTH_KEY)
        val t2 = v2.getDouble(NumassAnalyzer.LENGTH_KEY)
        val cr1 = v1.getDouble(NumassAnalyzer.COUNT_RATE_KEY)
        val cr2 = v2.getDouble(NumassAnalyzer.COUNT_RATE_KEY)
        val err1 = v1.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY)
        val err2 = v2.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY)
        // time-weighted mean of the two rates
        val countRate = (t1 * cr1 + t2 * cr2) / (t1 + t2)
        // weighted errors combined in quadrature
        val countRateErr = Math.sqrt(Math.pow(t1 * err1 / (t1 + t2), 2.0) + Math.pow(t2 * err2 / (t1 + t2), 2.0))
        return ValueMap.of(NAME_LIST,
                v1.getDouble(NumassAnalyzer.LENGTH_KEY) + v2.getDouble(NumassAnalyzer.LENGTH_KEY),
                v1.getInt(NumassAnalyzer.COUNT_KEY) + v2.getInt(NumassAnalyzer.COUNT_KEY),
                countRate,
                countRateErr,
                v1.getValue(NumassAnalyzer.WINDOW_KEY),
                v1.getValue(NumassAnalyzer.TIME_KEY),
                v1.getDouble(T0_KEY)
        )
    }

    /**
     * Resolve the t0 cut (in nanoseconds) from meta: either an explicit `t0`
     * value, or — given a `t0` node — derived from the requested count-rate
     * fraction via the exponential inter-arrival distribution:
     * t0 = -ln(1 - fraction) / rate.
     *
     * NOTE(review): `t0.minCR` is read here but not declared in the ValueDefs
     * below (which declare `t0.min`) — confirm the intended key names.
     */
    @ValueDefs(
            ValueDef(name = "t0", type = arrayOf(ValueType.NUMBER), info = "Constant t0 cut"),
            ValueDef(name = "t0.crFraction", type = arrayOf(ValueType.NUMBER), info = "The relative fraction of events that should be removed by time cut"),
            ValueDef(name = "t0.min", type = arrayOf(ValueType.NUMBER), def = "0", info = "Minimal t0")
    )
    private fun getT0(block: NumassBlock, meta: Meta): Int {
        return if (meta.hasValue("t0")) {
            meta.getInt("t0")!!
        } else if (meta.hasMeta("t0")) {
            val fraction = meta.getDouble("t0.crFraction")!!
            val cr = estimateCountRate(block)
            if (cr < meta.getDouble("t0.minCR", 0.0)) {
                0
            } else {
                Math.max(-1e9 / cr * Math.log(1.0 - fraction), meta.getDouble("t0.min", 0.0)!!).toInt()
            }
        } else {
            0
        }
    }

    /** Rough count rate in events per second from raw event count and block length. */
    private fun estimateCountRate(block: NumassBlock): Double {
        return block.events.count().toDouble() / block.length.toMillis() * 1000
    }

    /** Sequence of adjacent event pairs (event, next event) after window filtering. */
    fun getEventsPairs(block: NumassBlock, config: Meta): Sequence<Pair<NumassEvent, NumassEvent>> {
        return Sequence { getEvents(block, config).iterator() }.zipWithNext()
    }

    /**
     * The chain of event times in nanos
     *
     * Each event is paired with the delay to the NEXT event (clamped to zero
     * for non-monotonic offsets); the last event of the block is dropped by
     * zipWithNext.
     *
     * @param block
     * @param config
     * @return
     */
    fun getEventsWithDelay(block: NumassBlock, config: Meta): Stream<Pair<NumassEvent, Long>> {
        return super.getEvents(block, config).asSequence().zipWithNext { prev, next ->
            val delay = Math.max(next.timeOffset - prev.timeOffset, 0)
            Pair(prev, delay)
        }.asStream()
    }

    /**
     * The filtered stream of events
     *
     * Applies the t0 cut: only events whose delay to the next event is at
     * least t0 are kept.
     *
     * @param block
     * @param config
     * @return
     */
    override fun getEvents(block: NumassBlock, config: Meta): Stream<NumassEvent> {
        val t0 = getT0(block, config).toLong()
        return getEventsWithDelay(block, config).filter { pair -> pair.second >= t0 }.map { it.first }
    }

    /** Table format with an extra t0 column in addition to the standard point columns. */
    public override fun getTableFormat(config: Meta): TableFormat {
        return TableFormatBuilder()
                .addNumber(HV_KEY, X_VALUE_KEY)
                .addNumber(NumassAnalyzer.LENGTH_KEY)
                .addNumber(NumassAnalyzer.COUNT_KEY)
                .addNumber(NumassAnalyzer.COUNT_RATE_KEY, Y_VALUE_KEY)
                .addNumber(NumassAnalyzer.COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
                .addColumn(NumassAnalyzer.WINDOW_KEY)
                .addTime()
                .addNumber(T0_KEY)
                .build()
    }

    companion object {
        const val T0_KEY = "t0"

        // column order must match the argument order of ValueMap.of(NAME_LIST, ...) above
        val NAME_LIST = arrayOf(
                NumassAnalyzer.LENGTH_KEY,
                NumassAnalyzer.COUNT_KEY,
                NumassAnalyzer.COUNT_RATE_KEY,
                NumassAnalyzer.COUNT_RATE_ERROR_KEY,
                NumassAnalyzer.WINDOW_KEY,
                NumassAnalyzer.TIME_KEY,
                T0_KEY
        )
    }
}

View File

@ -23,7 +23,7 @@ class PointAnalyzer {
.fill(
analyzer
.getEventsWithDelay(point, Grind.buildMeta("window.lo": loChannel, "window.up": upChannel))
.mapToDouble { it.value / 1000 as double }
.mapToDouble { it.second / 1000 as double }
)
}

View File

@ -14,8 +14,8 @@ import hep.dataforge.tables.ColumnTable
import hep.dataforge.tables.Table
import inr.numass.NumassPlugin
import inr.numass.data.NumassDataUtils
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.analyzers.SmartAnalyzer
import inr.numass.data.api.NumassAnalyzer
import inr.numass.data.api.NumassSet
import inr.numass.data.storage.NumassStorage
import inr.numass.data.storage.NumassStorageFactory

View File

@ -23,7 +23,7 @@ import inr.numass.data.storage.NumassStorageFactory
Context ctx = Global.instance()
ctx.getPluginManager().load(PlotManager)
ctx.getPluginManager().load(NumassPlugin.class)
ctx.getPluginManager().load(NumassPlugin)
new GrindShell(ctx).eval {
File rootDir = new File("D:\\Work\\Numass\\data\\2017_05\\Fill_2")

View File

@ -7,7 +7,7 @@ import hep.dataforge.grind.helpers.PlotHelper
import hep.dataforge.tables.ValueMap
import inr.numass.NumassPlugin
import inr.numass.data.PointAnalyzer
import inr.numass.data.api.NumassAnalyzer
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.api.NumassPoint
import inr.numass.data.storage.ProtoNumassPoint

View File

@ -25,8 +25,8 @@ import inr.numass.data.NumassDataUtils
import javafx.application.Platform
import static hep.dataforge.grind.Grind.buildMeta
import static inr.numass.data.api.NumassAnalyzer.CHANNEL_KEY
import static inr.numass.data.api.NumassAnalyzer.COUNT_RATE_KEY
import static inr.numass.data.analyzers.NumassAnalyzer.CHANNEL_KEY
import static inr.numass.data.analyzers.NumassAnalyzer.COUNT_RATE_KEY
Context ctx = Global.instance()
ctx.getPluginManager().load(PlotManager)

View File

@ -6,7 +6,7 @@ import hep.dataforge.tables.Table
import hep.dataforge.tables.TableTransform
import hep.dataforge.tables.ValueMap
import hep.dataforge.values.Values
import inr.numass.data.NumassDataUtils
import inr.numass.data.analyzers.NumassAnalyzerKt
import org.apache.commons.math3.analysis.ParametricUnivariateFunction
import org.apache.commons.math3.exception.DimensionMismatchException
import org.apache.commons.math3.fitting.SimpleCurveFitter
@ -14,8 +14,8 @@ import org.apache.commons.math3.fitting.WeightedObservedPoint
import java.util.stream.Collectors
import static inr.numass.data.api.NumassAnalyzer.CHANNEL_KEY
import static inr.numass.data.api.NumassAnalyzer.COUNT_RATE_KEY
import static inr.numass.data.analyzers.NumassAnalyzer.CHANNEL_KEY
import static inr.numass.data.analyzers.NumassAnalyzer.COUNT_RATE_KEY
@CompileStatic
class UnderflowFitter {
@ -33,7 +33,7 @@ class UnderflowFitter {
double a = fitRes[0];
double sigma = fitRes[1];
return ValueMap.of(pointNames, voltage, a, sigma, a * sigma * Math.exp(xLow / sigma) / norm + 1d);
return ValueMap.of(pointNames, voltage, a, sigma, a * sigma * Math.exp(xLow / sigma as double) / norm + 1d);
}
static Table fitAllPoints(Map<Double, Table> data, int xLow, int xHigh, int upper, int binning) {
@ -56,7 +56,7 @@ class UnderflowFitter {
throw new IllegalArgumentException("Wrong borders for underflow calculation");
}
Table binned = TableTransform.filter(
NumassDataUtils.spectrumWithBinning(spectrum, binning),
NumassAnalyzerKt.spectrumWithBinning(spectrum, binning),
CHANNEL_KEY,
xLow,
xHigh
@ -89,7 +89,7 @@ class UnderflowFitter {
double a = parameters[0];
double sigma = parameters[1];
//return a * (Math.exp(x / sigma) - 1);
return a * Math.exp(x / sigma);
return a * Math.exp(x / sigma as double);
}
@Override
@ -99,7 +99,7 @@ class UnderflowFitter {
}
double a = parameters[0];
double sigma = parameters[1];
return [Math.exp(x / sigma), -a * x / sigma / sigma * Math.exp(x / sigma)] as double[]
return [Math.exp(x / sigma as double), -a * x / sigma / sigma * Math.exp(x / sigma as double)] as double[]
}
}

View File

@ -8,8 +8,8 @@ import hep.dataforge.grind.actions.GrindPipe
import hep.dataforge.meta.Meta
import hep.dataforge.storage.commons.StorageUtils
import hep.dataforge.tables.Table
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.NumassAnalyzer
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
import inr.numass.data.api.SimpleNumassPoint

View File

@ -28,7 +28,7 @@ import hep.dataforge.tables.ValueMap;
import hep.dataforge.values.Value;
import hep.dataforge.values.Values;
import inr.numass.NumassUtils;
import inr.numass.data.api.NumassAnalyzer;
import inr.numass.data.analyzers.NumassAnalyzer;
import inr.numass.data.api.NumassPoint;
import javafx.util.Pair;
import org.apache.commons.math3.analysis.interpolation.SplineInterpolator;
@ -42,7 +42,6 @@ import java.util.TreeMap;
import java.util.concurrent.CopyOnWriteArrayList;
import static hep.dataforge.values.ValueType.NUMBER;
import static inr.numass.data.analyzers.AbstractAnalyzer.TIME_KEY;
/**
* @author Darksnake
@ -211,7 +210,7 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
}
private Instant getTime(Values point) {
return point.getValue(TIME_KEY).timeValue();
return point.getValue(NumassAnalyzer.TIME_KEY).timeValue();
}
private int getTotal(Values point) {

View File

@ -29,7 +29,7 @@ import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import static inr.numass.data.api.NumassAnalyzer.COUNT_RATE_KEY;
import static inr.numass.data.analyzers.NumassAnalyzer.COUNT_RATE_KEY;
/**
* A generator for Numass events with given energy spectrum

View File

@ -11,8 +11,7 @@ import hep.dataforge.tables.Table;
import hep.dataforge.tables.TableTransform;
import hep.dataforge.tables.ValueMap;
import hep.dataforge.values.Values;
import inr.numass.data.NumassDataUtils;
import inr.numass.data.api.NumassAnalyzer;
import inr.numass.data.analyzers.NumassAnalyzer;
import inr.numass.data.api.NumassPoint;
import org.apache.commons.math3.analysis.ParametricUnivariateFunction;
import org.apache.commons.math3.exception.DimensionMismatchException;
@ -22,8 +21,9 @@ import org.apache.commons.math3.fitting.WeightedObservedPoint;
import java.util.List;
import java.util.stream.Collectors;
import static inr.numass.data.api.NumassAnalyzer.CHANNEL_KEY;
import static inr.numass.data.api.NumassAnalyzer.COUNT_RATE_KEY;
import static inr.numass.data.analyzers.NumassAnalyzer.CHANNEL_KEY;
import static inr.numass.data.analyzers.NumassAnalyzer.COUNT_RATE_KEY;
import static inr.numass.data.analyzers.NumassAnalyzerKt.spectrumWithBinning;
/**
* A class to calculate underflow correction
@ -110,7 +110,7 @@ public class UnderflowCorrection {
throw new IllegalArgumentException("Wrong borders for underflow calculation");
}
Table binned = TableTransform.filter(
NumassDataUtils.spectrumWithBinning(spectrum, binning),
spectrumWithBinning(spectrum, binning),
CHANNEL_KEY,
xLow,
xHigh

View File

@ -28,7 +28,7 @@ import hep.dataforge.stat.models.WeightedXYModel
import hep.dataforge.stat.models.XYModel
import hep.dataforge.tables.Adapters
import hep.dataforge.tables.ValuesAdapter
import inr.numass.data.api.NumassAnalyzer
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.api.NumassPoint
import inr.numass.models.*
import inr.numass.models.sterile.SterileNeutrinoSpectrum

View File

@ -32,7 +32,7 @@ import hep.dataforge.tables.Table
import hep.dataforge.tables.ValueMap
import hep.dataforge.values.ValueType
import hep.dataforge.values.Values
import inr.numass.data.api.NumassAnalyzer
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
import inr.numass.models.FSS

View File

@ -10,7 +10,7 @@ import hep.dataforge.tables.Table
import hep.dataforge.values.ValueType.NUMBER
import hep.dataforge.values.ValueType.STRING
import inr.numass.NumassUtils
import inr.numass.data.api.NumassAnalyzer
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.api.NumassSet
/**

View File

@ -26,7 +26,7 @@ import hep.dataforge.meta.Meta
import hep.dataforge.tables.*
import hep.dataforge.values.Values
import inr.numass.NumassUtils
import inr.numass.data.api.NumassAnalyzer
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.api.NumassPoint
import java.util.*

View File

@ -12,8 +12,8 @@ import hep.dataforge.plots.data.DataPlot
import hep.dataforge.tables.Adapters
import hep.dataforge.tables.Table
import hep.dataforge.values.ValueType
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.NumassAnalyzer
import inr.numass.data.api.NumassPoint
/**
@ -60,7 +60,7 @@ class TimeAnalyzerAction : OneToOneAction<NumassPoint, Table>() {
val histogram = UnivariateHistogram.buildUniform(0.0, binSize * binNum, binSize)
.fill(analyzer
.getEventsWithDelay(input, inputMeta)
.mapToDouble { it.value / 1000.0 }
.mapToDouble { it.second / 1000.0 }
).asTable()
//.histogram(input, loChannel, upChannel, binSize, binNum).asTable();

View File

@ -12,8 +12,8 @@ import hep.dataforge.plots.data.DataPlot
import hep.dataforge.tables.Adapters
import hep.dataforge.tables.Table
import hep.dataforge.values.ValueType
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.NumassAnalyzer
import inr.numass.data.api.NumassPoint
/**
@ -60,7 +60,7 @@ class TimeSpectrumAction : OneToOneAction<NumassPoint, Table>() {
val histogram = UnivariateHistogram.buildUniform(0.0, binSize * binNum, binSize)
.fill(analyzer
.getEventsWithDelay(input, inputMeta)
.mapToDouble { it.value / 1000.0 }
.mapToDouble { it.second / 1000.0 }
).asTable()
//.histogram(input, loChannel, upChannel, binSize, binNum).asTable();

View File

@ -18,8 +18,8 @@ import hep.dataforge.values.ValueType.NUMBER
import hep.dataforge.values.ValueType.STRING
import hep.dataforge.values.Values
import inr.numass.NumassUtils
import inr.numass.data.api.NumassAnalyzer.COUNT_RATE_ERROR_KEY
import inr.numass.data.api.NumassAnalyzer.COUNT_RATE_KEY
import inr.numass.data.analyzers.NumassAnalyzer.Companion.COUNT_RATE_ERROR_KEY
import inr.numass.data.analyzers.NumassAnalyzer.Companion.COUNT_RATE_KEY
import inr.numass.pointExpression
import java.util.*

View File

@ -0,0 +1,102 @@
/*
* Copyright 2017 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.scripts
import hep.dataforge.fx.plots.PlotManager
import hep.dataforge.kodex.buildContext
import hep.dataforge.kodex.buildMeta
import hep.dataforge.meta.Meta
import hep.dataforge.plots.PlotPlugin
import hep.dataforge.plots.data.DataPlot
import hep.dataforge.tables.Adapters
import inr.numass.NumassPlugin
import inr.numass.data.NumassDataUtils
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.analyzers.getSpectrum
import inr.numass.data.api.NumassSet
import inr.numass.data.storage.NumassStorageFactory
import java.io.File
import kotlin.streams.asSequence
fun main(args: Array<String>) {
val context = buildContext("NUMASS", NumassPlugin::class.java, PlotManager::class.java)
val rootDir = File("D:\\Work\\Numass\\data\\2017_05\\Fill_2")
val storage = NumassStorageFactory.buildLocal(context, rootDir, true, false);
val sets = (2..14).map { "set_$it" }
val loaders = sets.mapNotNull { set ->
storage.provide("loader::$set", NumassSet::class.java).orElse(null)
}
val all = NumassDataUtils.join("sum", loaders)
val point = all.optPoint(14000.0).get()
val t0 = 20e3.toLong()
val analyzer = TimeAnalyzer()
val seconds = point.length.toMillis().toDouble() / 1000.0
val binning = 20
val plots = context.getFeature(PlotPlugin::class.java);
val meta = buildMeta {
node("window"){
"lo" to 300
"up" to 1800
}
}
with(NumassAnalyzer) {
val events = getSpectrum(seconds, analyzer.getEvents(point).asSequence(),meta)
.withBinning(binning)
val filtered = getSpectrum(
seconds,
analyzer.getEventsPairs(point, Meta.empty()).filter { it.second.timeOffset - it.first.timeOffset > t0 }.map { it.second },
meta
).withBinning(binning)
plots.getPlotFrame("amps").apply {
add(DataPlot.plot("events", ADAPTER, events))
add(DataPlot.plot("filtered", ADAPTER, filtered))
}
plots.getPlotFrame("ratio").apply {
add(
DataPlot.plot(
"ratio",
Adapters.DEFAULT_XY_ADAPTER,
events.zip(filtered) { f, s ->
Adapters.buildXYDataPoint(f.getDouble(CHANNEL_KEY), f.getDouble(COUNT_RATE_KEY) / s.getDouble(COUNT_RATE_KEY))
}
)
)
}
}
}

View File

@ -3,7 +3,6 @@ package inr.numass.viewer
import hep.dataforge.fx.dfIcon
import hep.dataforge.fx.plots.PlotContainer
import hep.dataforge.fx.runGoal
import hep.dataforge.fx.ui
import hep.dataforge.goals.Goal
import hep.dataforge.kodex.configure
import hep.dataforge.meta.Meta
@ -12,9 +11,8 @@ import hep.dataforge.plots.data.DataPlot
import hep.dataforge.plots.jfreechart.JFreeChartFrame
import hep.dataforge.tables.Adapters
import hep.dataforge.tables.Table
import inr.numass.data.NumassDataUtils
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.analyzers.SimpleAnalyzer
import inr.numass.data.api.NumassAnalyzer
import inr.numass.data.api.NumassPoint
import javafx.beans.Observable
import javafx.beans.binding.DoubleBinding
@ -133,7 +131,7 @@ class AmplitudeView(
DataPlot.plot(
key,
Adapters.buildXYAdapter(NumassAnalyzer.CHANNEL_KEY, valueAxis),
NumassDataUtils.spectrumWithBinning(getSpectrum(point), binning)
NumassAnalyzer.spectrumWithBinning(getSpectrum(point), binning)
).configure {
"connectionType" to "step"
"thickness" to 2

View File

@ -11,8 +11,8 @@ import hep.dataforge.plots.data.DataPlot
import hep.dataforge.plots.jfreechart.JFreeChartFrame
import hep.dataforge.tables.Adapters
import hep.dataforge.tables.Table
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.analyzers.SimpleAnalyzer
import inr.numass.data.api.NumassAnalyzer
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
import javafx.beans.property.SimpleIntegerProperty

View File

@ -1,7 +1,7 @@
package inr.numass.viewer.test
import hep.dataforge.fx.dfIcon
import hep.dataforge.kodex.GLOBAL
import hep.dataforge.kodex.global
import hep.dataforge.tables.Table
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
@ -37,7 +37,7 @@ class ViewerComponentsTest : View(title = "Numass viewer test", icon = ImageView
action {
runAsync {
val rootDir = File("D:\\Work\\Numass\\data\\2017_05\\Fill_2")
val set: NumassSet = NumassStorageFactory.buildLocal(GLOBAL, rootDir, true, true)
val set: NumassSet = NumassStorageFactory.buildLocal(global, rootDir, true, true)
.provide("loader::set_2", NumassSet::class.java)
.orElseThrow { RuntimeException("err") }
update(set);