Revision of numass data architecture. Viewer is working

darksnake 2017-07-17 17:04:39 +03:00
parent 8ce3654757
commit 8dca28960b
9 changed files with 259 additions and 180 deletions

View File

@@ -37,11 +37,13 @@ public abstract class AbstractAnalyzer implements NumassAnalyzer {
      * @return
      */
     public Stream<NumassEvent> getEventStream(NumassBlock block, Meta config) {
-        if (getProcessor() == null && block.getFrames().count() > 0) {
-            throw new IllegalArgumentException("Signal processor needed to analyze frames");
-        } else {
         int loChannel = config.getInt("window.lo", 0);
         int upChannel = config.getInt("window.up", Integer.MAX_VALUE);
+        if (block.getFrames().count() == 0) {
+            return block.getEvents().filter(it -> it.getChanel() >= loChannel && it.getChanel() <= upChannel);
+        } else if (getProcessor() == null) {
+            throw new IllegalArgumentException("Signal processor needed to analyze frames");
+        } else {
             return Stream.concat(block.getEvents(), block.getFrames().flatMap(getProcessor()::analyze))
                     .filter(it -> it.getChanel() >= loChannel && it.getChanel() <= upChannel);
         }
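For illustration, a minimal usage sketch of the reworked window handling in getEventStream (not part of this commit). It assumes SimpleAnalyzer extends AbstractAnalyzer and that the block contains plain events only, so the frame branch and the signal processor are never touched; the MetaBuilder-based config construction is likewise an assumption about the dataforge API.

import hep.dataforge.meta.Meta;
import hep.dataforge.meta.MetaBuilder;
import inr.numass.data.analyzers.SimpleAnalyzer;
import inr.numass.data.api.NumassBlock;

class WindowedEventCount {
    // Count the events of a block that fall into the [400, 3100] channel window.
    static long countInWindow(NumassBlock block) {
        Meta config = new MetaBuilder("analyzer")
                .setValue("window.lo", 400)
                .setValue("window.up", 3100)
                .build();
        return new SimpleAnalyzer().getEventStream(block, config).count();
    }
}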

View File

@@ -7,6 +7,7 @@ import hep.dataforge.values.Values;
 import java.util.NavigableMap;
 import java.util.TreeMap;
 import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.atomic.AtomicReference;
 import java.util.stream.Stream;
 import static hep.dataforge.tables.XYAdapter.*;

@@ -31,6 +32,41 @@ public interface NumassAnalyzer {
         }).mapToLong(it -> it.getValue(COUNT_KEY).numberValue().longValue()).sum();
     }
+    /**
+     * Apply window and binning to a spectrum
+     *
+     * @param lo
+     * @param up
+     * @param binSize
+     * @return
+     */
+    static Table spectrumWithBinning(Table spectrum, int lo, int up, int binSize) {
+        TableFormat format = new TableFormatBuilder()
+                .addNumber(CHANNEL_KEY, X_VALUE_KEY)
+                .addNumber(COUNT_KEY, Y_VALUE_KEY)
+                .addNumber(COUNT_RATE_KEY)
+                .addNumber("binSize");
+        ListTable.Builder builder = new ListTable.Builder(format);
+        for (int chan = lo; chan < up - binSize; chan += binSize) {
+            AtomicLong count = new AtomicLong(0);
+            AtomicReference<Double> countRate = new AtomicReference<>(0d);
+            int binLo = chan;
+            int binUp = chan + binSize;
+            spectrum.getRows().filter(row -> {
+                int c = row.getInt(CHANNEL_KEY);
+                return c >= binLo && c <= binUp;
+            }).forEach(row -> {
+                count.addAndGet(row.getValue(COUNT_KEY).numberValue().longValue());
+                countRate.accumulateAndGet(row.getDouble(COUNT_RATE_KEY), (d1, d2) -> d1 + d2);
+            });
+            int bin = Math.min(binSize, up - chan);
+            builder.row((double) chan + (double) bin / 2d, count.get(), countRate.get(), bin);
+        }
+        return builder.build();
+    }
     String CHANNEL_KEY = "channel";
     String COUNT_KEY = "count";
     String LENGTH_KEY = "length";

@@ -92,8 +128,8 @@ public interface NumassAnalyzer {
                         new ValueMap(format.namesAsArray(),
                                 entry.getKey(),
                                 entry.getValue(),
-                                entry.getValue().get() / block.getLength().toMillis() * 1000,
-                                Math.sqrt(entry.getValue().get()) / block.getLength().toMillis() * 1000
+                                (double) entry.getValue().get() / block.getLength().toMillis() * 1000d,
+                                Math.sqrt(entry.getValue().get()) / block.getLength().toMillis() * 1000d
                         )
                 )
         ).build();
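A hedged usage sketch of the relocated spectrumWithBinning helper (not part of this commit). The per-point spectrum is obtained through the analyzer, mirroring the getSpectrum call that appears in the viewer changes below; the channel range and bin width are illustrative.

import hep.dataforge.meta.Meta;
import hep.dataforge.tables.Table;
import inr.numass.data.analyzers.SimpleAnalyzer;
import inr.numass.data.api.NumassAnalyzer;
import inr.numass.data.api.NumassPoint;

class BinnedSpectrumExample {
    // Build an amplitude spectrum for one point and rebin it into 20-channel bins.
    static Table binnedSpectrum(NumassPoint point) {
        NumassAnalyzer analyzer = new SimpleAnalyzer();
        Table spectrum = analyzer.getSpectrum(point, Meta.empty());
        return NumassAnalyzer.spectrumWithBinning(spectrum, 0, 4000, 20);
    }
}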

View File

@@ -10,8 +10,9 @@ import org.jetbrains.annotations.NotNull;
 import org.slf4j.LoggerFactory;
 import java.io.IOException;
-import java.io.InputStream;
 import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.channels.ReadableByteChannel;
 import java.time.Duration;
 import java.time.Instant;
 import java.util.Iterator;
@@ -30,14 +31,14 @@ public class ClassicNumassPoint implements NumassPoint {
     @Override
     public Stream<NumassBlock> getBlocks() {
-        double u = envelope.meta().getDouble("external_meta.HV1_value", 0);
+        // double u = envelope.meta().getDouble("external_meta.HV1_value", 0);
         long length;
         if (envelope.meta().hasValue("external_meta.acquisition_time")) {
             length = envelope.meta().getValue("external_meta.acquisition_time").longValue();
         } else {
             length = envelope.meta().getValue("acquisition_time").longValue();
         }
-        return Stream.of(new ClassicBlock(getStartTime(), Duration.ofNanos(length), 0));
+        return Stream.of(new ClassicBlock(getStartTime(), Duration.ofSeconds(length), 0));
     }
     @Override
@@ -49,6 +50,11 @@ public class ClassicNumassPoint implements NumassPoint {
         }
     }
+
+    @Override
+    public double getVoltage() {
+        return meta().getDouble("external_meta.HV1_value", 0);
+    }
     @Override
     public Meta meta() {
         return envelope.meta();
@@ -86,38 +92,40 @@ public class ClassicNumassPoint implements NumassPoint {
         public Iterator<NumassEvent> iterator() {
             double timeCoef = envelope.meta().getDouble("time_coeff", 50);
             try {
-                InputStream stream = envelope.getData().getStream();
-                stream.skip(blockOffset);
+                ByteBuffer buffer = ByteBuffer.allocate(7000);
+                buffer.order(ByteOrder.LITTLE_ENDIAN);
+                ReadableByteChannel channel = envelope.getData().getChannel();
+                channel.read(buffer);
+                buffer.flip();
                 return new Iterator<NumassEvent>() {
                     @Override
                     public boolean hasNext() {
                         try {
-                            return stream.available() > 0;
+                            if (buffer.hasRemaining()) {
+                                return true;
+                            } else {
+                                buffer.flip();
+                                int num = channel.read(buffer);
+                                if (num > 0) {
+                                    buffer.flip();
+                                    return true;
+                                } else {
+                                    return false;
+                                }
+                            }
                         } catch (IOException e) {
-                            LoggerFactory.getLogger(ClassicNumassPoint.this.getClass()).error("Unexpected IOException " +
-                                    "when reading block", e);
+                            LoggerFactory.getLogger(ClassicNumassPoint.this.getClass()).error("Unexpected IOException when reading block", e);
                             return false;
                         }
                     }
                     @Override
                     public NumassEvent next() {
-                        try {
-                            byte[] bytes = new byte[7];
-                            if (stream.read(bytes) < 7) {
-                                throw new RuntimeException("Failed to read event");
-                            }
-                            ByteBuffer buffer = ByteBuffer.wrap(bytes);
                             short channel = (short) Short.toUnsignedInt(buffer.getShort());
                             long time = Integer.toUnsignedLong(buffer.getInt());
                             byte status = buffer.get(); // status is ignored
                             return new NumassEvent(channel, startTime, (long) (time * timeCoef));
-                        } catch (IOException ex) {
-                            LoggerFactory.getLogger(ClassicNumassPoint.this.getClass()).error("Unexpected IOException " +
-                                    "when reading block", ex);
-                            throw new RuntimeException(ex);
-                        }
                     }
                 };
             } catch (IOException ex) {

@@ -125,6 +133,7 @@ public class ClassicNumassPoint implements NumassPoint {
         }
     }
+    @Override
     public Stream<NumassFrame> getFrames() {
         return Stream.empty();
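The rewritten iterator reads fixed 7-byte event records from a little-endian ByteBuffer (the 7000-byte read buffer holds exactly 1000 of them). Below is a standalone parsing sketch, not part of the commit, with the record layout inferred from the reader above: 2 bytes of amplitude channel, a 4-byte raw time counter scaled by time_coeff, and 1 status byte that is ignored.

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

class ClassicEventRecord {
    final short channel;
    final long time;

    ClassicEventRecord(short channel, long time) {
        this.channel = channel;
        this.time = time;
    }

    // Parse one 7-byte record from a buffer positioned at a record boundary.
    static ClassicEventRecord read(ByteBuffer buffer, double timeCoef) {
        short channel = (short) Short.toUnsignedInt(buffer.getShort()); // 2 bytes: amplitude channel
        long rawTime = Integer.toUnsignedLong(buffer.getInt());         // 4 bytes: raw time counter
        byte status = buffer.get();                                     // 1 byte: status, ignored
        return new ClassicEventRecord(channel, (long) (rawTime * timeCoef));
    }

    public static void main(String[] args) {
        ByteBuffer buffer = ByteBuffer.allocate(7).order(ByteOrder.LITTLE_ENDIAN);
        buffer.putShort((short) 1200).putInt(500).put((byte) 0);
        buffer.flip();
        ClassicEventRecord event = read(buffer, 50.0); // 50 is the default time_coeff in the reader above
        System.out.println(event.channel + " : " + event.time);
    }
}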

View File

@@ -20,10 +20,6 @@ import hep.dataforge.io.envelopes.EnvelopeBuilder;
 import hep.dataforge.io.markup.Markedup;
 import hep.dataforge.io.markup.SimpleMarkupRenderer;
 import hep.dataforge.meta.Meta;
-import hep.dataforge.tables.ListTable;
-import hep.dataforge.tables.Table;
-import hep.dataforge.tables.TableFormat;
-import hep.dataforge.tables.TableFormatBuilder;
 import hep.dataforge.values.Values;
 import org.apache.commons.math3.analysis.UnivariateFunction;

@@ -31,13 +27,8 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.util.HashMap;
 import java.util.Map;
-import java.util.concurrent.atomic.AtomicLong;
-import java.util.concurrent.atomic.AtomicReference;
 import java.util.function.Consumer;
-import static hep.dataforge.tables.XYAdapter.X_VALUE_KEY;
-import static hep.dataforge.tables.XYAdapter.Y_VALUE_KEY;
-import static inr.numass.data.api.NumassAnalyzer.*;
 import static java.lang.Math.*;
 /**

@@ -124,34 +115,5 @@ public class NumassUtils {
         writeEnvelope(stream, meta, out -> new SimpleMarkupRenderer(out).render(something.markup(meta)));
     }

-    /**
-     * Apply window and binning to a spectrum
-     *
-     * @param lo
-     * @param up
-     * @param binSize
-     * @return
-     */
-    public static Table spectrumWithBinning(Table spectrum, int lo, int up, int binSize) {
-        TableFormat format = new TableFormatBuilder()
-                .addNumber(CHANNEL_KEY, X_VALUE_KEY)
-                .addNumber(COUNT_KEY, Y_VALUE_KEY)
-                .addNumber(COUNT_RATE_KEY)
-                .addNumber("binSize");
-        ListTable.Builder builder = new ListTable.Builder(format);
-        for (int chan = lo; chan < up - binSize; chan += binSize) {
-            AtomicLong count = new AtomicLong(0);
-            AtomicReference<Double> countRate = new AtomicReference<>(0d);
-            spectrum.getRows().filter(row -> {
-                int c = row.getInt(CHANNEL_KEY);
-                return c >= lo && c <= up;
-            }).forEach(row -> {
-                count.addAndGet(row.getValue(COUNT_KEY).numberValue().longValue());
-                countRate.accumulateAndGet(row.getDouble(COUNT_RATE_KEY), (d1, d2) -> d1 + d2);
-            });
-            int bin = Math.min(binSize, up - chan);
-            builder.row((double) chan + (double) bin / 2d, count.get(), countRate.get(), bin);
-        }
-        return builder.build();
-    }
 }

View File

@@ -106,7 +106,7 @@ public class UnderflowCorrection {
         if (xHigh <= xLow) {
             throw new IllegalArgumentException("Wrong borders for underflow calculation");
         }
-        Table binned = NumassUtils.spectrumWithBinning(spectrum, xLow, xHigh, binning);
+        Table binned = NumassAnalyzer.spectrumWithBinning(spectrum, xLow, xHigh, binning);
         List<WeightedObservedPoint> points = binned.getRows()
                 .map(p -> new WeightedObservedPoint(

View File

@@ -15,6 +15,7 @@ import hep.dataforge.storage.api.PointLoader
 import hep.dataforge.storage.api.Storage
 import hep.dataforge.storage.filestorage.FileStorageFactory
 import inr.numass.NumassProperties
+import inr.numass.data.api.NumassSet
 import inr.numass.data.storage.NumassStorage
 import javafx.application.Platform
 import javafx.beans.property.SimpleObjectProperty

@@ -104,7 +105,7 @@ class MainView : View("Numass data viewer") {
                 if (e.clickCount == 2) {
                     val value = focusModel.focusedCell.treeItem.value
                     when (value.content) {
-                        is NumassData -> {
+                        is NumassSet -> {
                             numassLoaderView.loadData(value.content)
                             loaderPane.center = numassLoaderView.root
                         }

@@ -253,11 +254,11 @@ class MainView : View("Numass data viewer") {
         }
         fun getTime(): String {
-            if (content is NumassData) {
-                if (content.startTime() == null) {
+            if (content is NumassSet) {
+                if (content.startTime == null) {
                     return ""
                 } else {
-                    return content.startTime().toString()
+                    return content.startTime.toString()
                 }
             } else if (content is Metoid) {
                 return content.meta().getString("file.timeModified", "")

View File

@@ -3,7 +3,8 @@ package inr.numass.viewer
 import hep.dataforge.data.Data
 import hep.dataforge.meta.Meta
 import hep.dataforge.tables.Table
-import java.time.Instant
+import inr.numass.data.api.NumassPoint
+import inr.numass.data.api.NumassSet
 import java.util.stream.Collectors
 import java.util.stream.Stream

@@ -11,12 +12,17 @@ import java.util.stream.Stream
  * Cached numass data
  * Created by darksnake on 23-Jun-17.
  */
-class NumassDataCache(val data: NumassData) {
+class NumassDataCache(val data: NumassSet): NumassSet {
     private val cachedDescription: String by lazy { data.description }
     private val cachedMeta: Meta by lazy { data.meta }
-    private val cachedPoints: List<NumassPoint> by lazy { data.stream().collect(Collectors.toList()) }
+    private val cachedPoints: List<NumassPoint> by lazy { data.points.collect(Collectors.toList()) }
     private val hv: Table by lazy { data.hvData.get() }
+
+    override fun getPoints(): Stream<NumassPoint> {
+        return cachedPoints.stream();
+    }
     override fun getDescription(): String {
         return cachedDescription
     }

@@ -25,23 +31,11 @@
         return cachedMeta
     }
-    override fun stream(): Stream<NumassPoint> {
-        return cachedPoints.stream();
-    }
-    override fun isEmpty(): Boolean {
-        return data.isEmpty
-    }
-    override fun startTime(): Instant {
-        return data.startTime()
-    }
     override fun getName(): String {
         return data.name;
     }
-    override fun getHVData(): Data<Table> {
+    override fun getHvData(): Data<Table> {
         return Data.buildStatic(hv);
     }
 }
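Since NumassDataCache now implements NumassSet directly, the cached wrapper can stand in for the raw set anywhere a NumassSet is expected. A trivial sketch for illustration (not part of the commit):

import inr.numass.data.api.NumassSet;
import inr.numass.viewer.NumassDataCache;

class CachedSetExample {
    // Wrap a set so that points, meta and HV data are fetched from the source only once.
    static NumassSet cached(NumassSet raw) {
        return new NumassDataCache(raw);
    }
}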

View File

@@ -4,6 +4,8 @@ import hep.dataforge.context.Context
 import hep.dataforge.context.Global
 import hep.dataforge.fx.work.WorkManager
 import hep.dataforge.io.ColumnedDataWriter
+import hep.dataforge.kodex.buildMeta
+import hep.dataforge.meta.Meta
 import hep.dataforge.meta.MetaBuilder
 import hep.dataforge.plots.XYPlotFrame
 import hep.dataforge.plots.data.PlotDataUtils

@@ -13,11 +15,14 @@ import hep.dataforge.plots.data.TimePlottable
 import hep.dataforge.plots.fx.PlotContainer
 import hep.dataforge.plots.jfreechart.JFreeChartFrame
 import hep.dataforge.storage.commons.JSONMetaWriter
-import hep.dataforge.tables.ListTable
+import hep.dataforge.tables.Table
 import hep.dataforge.tables.ValueMap
 import hep.dataforge.tables.XYAdapter
-import hep.dataforge.values.Values
-import inr.numass.data.NumassDataUtils
+import inr.numass.data.analyzers.SimpleAnalyzer
+import inr.numass.data.api.NumassAnalyzer
+import inr.numass.data.api.NumassPoint
+import inr.numass.data.api.NumassSet
+import javafx.application.Platform
 import javafx.beans.property.SimpleObjectProperty
 import javafx.beans.value.ObservableValue
 import javafx.collections.FXCollections
@@ -36,8 +41,8 @@ import org.controlsfx.validation.Validator
 import org.slf4j.LoggerFactory
 import tornadofx.*
 import java.io.IOException
+import java.util.concurrent.atomic.AtomicInteger
 import java.util.logging.Level
-import java.util.stream.Collectors

 /**
  * Numass loader view

@@ -46,10 +51,10 @@
  */
 class NumassLoaderView : View() {
     override val root: AnchorPane by fxml("/fxml/NumassLoaderView.fxml")
-    lateinit var main: MainView
+//    lateinit var main: MainView
     private val detectorPlotPane: BorderPane by fxid();
-    private val tabPane: TabPane by fxid();
+//    private val tabPane: TabPane by fxid();
     private val infoTextBox: TextArea by fxid();
     private val spectrumPlotPane: BorderPane by fxid();
     private val lowChannelField: TextField by fxid();
@@ -66,8 +71,14 @@
     private val detectorNormalizeSwitch: CheckBox = CheckBox("Normailize")
     private val detectorDataExportButton: Button = Button("Export")

-    val dataProperty = SimpleObjectProperty<NumassData>()
-    var data: NumassData? by dataProperty
+    val dataProperty = SimpleObjectProperty<NumassSet>()
+    var data: NumassSet? by dataProperty
+
+    val analyzerProperty = SimpleObjectProperty<NumassAnalyzer>(SimpleAnalyzer())
+    var analyzer: NumassAnalyzer by analyzerProperty
+
+    val spectra = HashMap<Double, Table>();//spectra cache

     val spectrumData = PlottableData("spectrum")
     val hvPlotData = PlottableGroup<TimePlottable>()
@@ -109,6 +120,7 @@
         detectorNormalizeSwitch.isSelected = true
         detectorNormalizeSwitch.padding = Insets(5.0)
+        detectorPlot.plot = detectorPlotFrame
         detectorPlot.addToSideBar(0, l, detectorBinningSelector, detectorNormalizeSwitch, Separator(Orientation.HORIZONTAL))
         detectorDataExportButton.maxWidth = java.lang.Double.MAX_VALUE
@@ -126,7 +138,9 @@
                 .setValue("yAxis.axisTitle", "count rate")
                 .setValue("yAxis.axisUnits", "Hz")
                 .setValue("legend.show", false)
-        spectrumPlot.plot = JFreeChartFrame(spectrumPlotMeta)
+        spectrumPlot.plot = JFreeChartFrame(spectrumPlotMeta).apply {
+            add(spectrumData)
+        }
         lowChannelField.textProperty().bindBidirectional(channelSlider.lowValueProperty(), NumberStringConverter())
         upChannelField.textProperty().bindBidirectional(channelSlider.highValueProperty(), NumberStringConverter())
@@ -182,16 +196,21 @@
         hvPlot.plot = JFreeChartFrame(hvPlotMeta)
         dataProperty.addListener { observable, oldValue, newData ->
+            //clearing spectra cache
+            if (oldValue != newData) {
+                spectra.clear()
+            }
             if (newData != null) {
                 getWorkManager().startWork("viewer.numass.load") { work ->
                     work.title = "Load numass data (" + newData.name + ")"
                     //setup info
                     updateInfo(newData)
-                    //setup spectrum plot
-                    updateSpectrum(newData)
                     //setup hv plot
                     updateHV(newData)
+                    //setup spectrum plot
+                    updateSpectrum(newData)
                     //setup detector data
                     updateDetectorPane(newData)
@@ -213,15 +232,16 @@
         return getContext().getFeature(WorkManager::class.java);
     }

-    fun loadData(data: NumassData?) {
-        this.data = if (data == null) {
-            data
-        } else {
-            NumassDataCache(data)
-        }
+    fun loadData(data: NumassSet?) {
+        this.data = data;
+//        this.data = if (data == null) {
+//            data
+//        } else {
+//            NumassDataCache(data)
+//        }
     }

-    private fun updateHV(data: NumassData) {
+    private fun updateHV(data: NumassSet) {
         hvPlotData.forEach { it.clear() }
         runAsync {
             data.hvData.get()
@@ -239,21 +259,41 @@
     }

-    private fun updateInfo(data: NumassData) {
+    private fun updateInfo(data: NumassSet) {
         val info = data.meta()
         infoTextBox.text = JSONMetaWriter().writeString(info).replace("\\r", "\r\t").replace("\\n", "\n\t")
     }

-    private fun updateSpectrum(data: NumassData) {
-        spectrumPlot.plot.add(spectrumData)
-        val lowChannel = channelSlider.lowValue.toInt()
-        val highChannel = channelSlider.highValue.toInt()
-        runAsync {
-            spectrumData.fillData(data.nmPoints.stream()
-                    .map { point: NumassPoint -> getSpectrumPoint(point, lowChannel, highChannel, dTime) }
-                    .collect(Collectors.toList<Values>())
-            )
+    /**
+     * Get energy spectrum for a specific point
+     */
+    private fun getSpectrum(point: NumassPoint): Table {
+        synchronized(this) {
+            return spectra.computeIfAbsent(point.voltage) { analyzer.getSpectrum(point, Meta.empty()) }
+        }
+    }
+
+    private fun updateSpectrum(data: NumassSet) {
+        spectrumData.clear()
+
+        val loChannel = channelSlider.lowValue.toShort()
+        val upChannel = channelSlider.highValue.toShort()
+        data.points.forEach { point ->
+            val count = NumassAnalyzer.countInWindow(getSpectrum(point), loChannel, upChannel);
+            val seconds = point.length.toMillis() / 1000.0;
+            val nuPoint = ValueMap(
+                    mapOf(
+                            XYAdapter.X_AXIS to point.voltage,
+                            XYAdapter.Y_AXIS to (count / seconds),
+                            XYAdapter.Y_ERROR_KEY to Math.sqrt(count.toDouble()) / seconds
+                    )
+            )
+            Platform.runLater {
+                spectrumData.append(nuPoint)
+            }
+        }
+        spectrumExportButton.isDisable = false
+    }
     }

     private val dTime: Double

@@ -263,51 +303,53 @@
             } catch (ex: NumberFormatException) {
                 return 0.0
             }
-    }
-    private fun getSpectrumPoint(point: NumassPoint, lowChannel: Int, upChannel: Int, dTime: Double): Values {
-        val u = point.voltage
-        return ValueMap(arrayOf(XYAdapter.X_VALUE_KEY, XYAdapter.Y_VALUE_KEY, XYAdapter.Y_ERROR_KEY), u,
-                NumassDataUtils.countRateWithDeadTime(point, lowChannel, upChannel, dTime),
-                NumassDataUtils.countRateWithDeadTimeErr(point, lowChannel, upChannel, dTime))
     }
     /**
      * update detector pane with new data
      */
-    private fun updateDetectorPane(data: NumassData) {
-        val points = data.nmPoints;
+    private fun updateDetectorPane(data: NumassSet) {
+        Platform.runLater { detectorPlotFrame.clear() }
         val work = getWorkManager().getWork("viewer.numass.load.detector")
-        work.maxProgress = points.size.toDouble()
+        work.maxProgress = data.points.count().toDouble();
         work.progress = 0.0
-        val normalize = detectorNormalizeSwitch.isSelected
         val binning = detectorBinningSelector.value
+        val valueAxis = if (detectorNormalizeSwitch.isSelected) {
+            NumassAnalyzer.COUNT_RATE_KEY
+        } else {
+            NumassAnalyzer.COUNT_KEY
+        }
+///        detectorPlot.plot = detectorPlotFrame
         runAsync {
-            points.map { point ->
-                val seriesName = String.format("%d: %.2f", points.indexOf(point), point.voltage)
-                val datum = PlottableData.plot(seriesName, XYAdapter("chanel", "count"), point.getData(binning, normalize))
-                datum.configure(plottableConfig)
+            val index = AtomicInteger(0);
+            data.points.map { point ->
+                val seriesName = String.format("%d: %.2f", index.incrementAndGet(), point.voltage)
+                PlottableData.plot(
+                        seriesName,
+                        XYAdapter(NumassAnalyzer.CHANNEL_KEY, valueAxis),
+                        NumassAnalyzer.spectrumWithBinning(getSpectrum(point), 0, 4000, binning)
+                ).apply {
+                    configure(plottableConfig)
+                }
+            }.forEach {
                 work.increaseProgress(1.0)
-                datum;
+                Platform.runLater {
+                    detectorPlotFrame.add(it)
+                }
             }
-        } ui {
-            //TODO do smart update here
-            detectorPlotFrame.setAll(it)
-        }
-        detectorPlot.plot = detectorPlotFrame
+        }
         work.setProgressToMax()
         detectorDataExportButton.isDisable = false
+    }
     }
     private fun onSpectrumExportClick(event: ActionEvent) {
         if (data != null) {
-            val points = data!!.nmPoints
-            if (points.isNotEmpty()) {
             val fileChooser = FileChooser()
             fileChooser.title = "Choose text export destination"
             fileChooser.initialFileName = data!!.name + "_spectrum.onComplete"
@@ -317,20 +359,24 @@
                 val loChannel = channelSlider.lowValue.toInt()
                 val upChannel = channelSlider.highValue.toInt()
                 val dTime = dTime
-                val spectrumDataSet = ListTable.Builder(*names)
-                for (point in points) {
-                    spectrumDataSet.row(
-                            point.voltage,
-                            point.voltage,
-                            point.length,
-                            point.totalCount,
-                            point.getCountInWindow(loChannel, upChannel),
-                            NumassDataUtils.countRateWithDeadTime(point, loChannel, upChannel, dTime),
-                            NumassDataUtils.countRateWithDeadTimeErr(point, loChannel, upChannel, dTime),
-                            point.startTime
-                    )
-                }
+//                val spectrumDataSet = ListTable.Builder(*names)
+//
+//                for (point in points) {
+//                    spectrumDataSet.row(
+//                            point.voltage,
+//                            point.voltage,
+//                            point.length,
+//                            point.totalCount,
+//                            point.getCountInWindow(loChannel, upChannel),
+//                            NumassDataUtils.countRateWithDeadTime(point, loChannel, upChannel, dTime),
+//                            NumassDataUtils.countRateWithDeadTimeErr(point, loChannel, upChannel, dTime),
+//                            point.startTime
+//                    )
+//                }
+                val spectrumDataSet = analyzer.analyze(data, buildMeta {
+                    "window.lo" to loChannel
+                    "window.up" to upChannel
+                })
                 try {
                     val comment = String.format("Numass data viewer spectrum data export for %s%n"
@@ -339,14 +385,14 @@
                             data!!.name, loChannel, upChannel, dTime)
                     ColumnedDataWriter
-                            .writeTable(destination, spectrumDataSet.build(), comment, false)
+                            .writeTable(destination, spectrumDataSet, comment, false)
                 } catch (ex: IOException) {
                     log.log(Level.SEVERE, "Destination file not found", ex)
                 }
             }
         }
-        }
     }

     private fun onExportButtonClick(event: ActionEvent) {

View File

@@ -0,0 +1,29 @@
+package inr.numass.viewer.test
+
+import hep.dataforge.context.Global
+import hep.dataforge.fx.work.WorkManager
+import inr.numass.data.api.NumassSet
+import inr.numass.data.storage.NumassStorageFactory
+import inr.numass.viewer.NumassLoaderView
+import javafx.application.Application
+import javafx.stage.Stage
+import tornadofx.*
+import java.io.File
+
+/**
+ * Created by darksnake on 17-Jul-17.
+ */
+class NumassTest : App(NumassLoaderView::class) {
+    override fun start(stage: Stage) {
+        super.start(stage)
+        val storage = NumassStorageFactory.buildLocal(File("D:\\Work\\Numass\\data\\2017_05\\"))
+        Global.setDefaultContext(Global.instance())
+        WorkManager().startGlobal()
+        val view = find<NumassLoaderView>();
+        view.data = storage.provide("Fill_1/set_4", NumassSet::class.java).get();
+    }
+}
+
+fun main(args: Array<String>) {
+    Application.launch(NumassTest::class.java)
+}