Minor fixes

Alexander Nozik 2017-12-19 17:04:11 +03:00
parent 467f0fadfc
commit cd227e8465
10 changed files with 240 additions and 264 deletions

View File

@@ -1,152 +0,0 @@
package inr.numass.data.api;
import hep.dataforge.meta.Meta;
import hep.dataforge.tables.*;
import hep.dataforge.values.Value;
import hep.dataforge.values.Values;
import inr.numass.data.analyzers.SmartAnalyzer;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.IntStream;
import java.util.stream.Stream;
import static hep.dataforge.tables.Adapters.*;
import static inr.numass.data.api.NumassPoint.HV_KEY;
/**
* A general raw data analysis utility. It could have different implementations.
* Created by darksnake on 06-Jul-17.
*/
public interface NumassAnalyzer {
static NumassAnalyzer DEFAULT_ANALYZER = new SmartAnalyzer();
short MAX_CHANNEL = 10000;
/**
* Calculate the number of counts in the given channel window [loChannel, upChannel)
*
* @param spectrum
* @param loChannel
* @param upChannel
* @return
*/
static long countInWindow(Table spectrum, short loChannel, short upChannel) {
return spectrum.getRows().filter(row -> {
int channel = row.getInt(CHANNEL_KEY);
return channel >= loChannel && channel < upChannel;
}).mapToLong(it -> it.getValue(COUNT_KEY).numberValue().longValue()).sum();
}
String CHANNEL_KEY = "channel";
String COUNT_KEY = "count";
String LENGTH_KEY = "length";
String COUNT_RATE_KEY = "cr";
String COUNT_RATE_ERROR_KEY = "crErr";
/**
* Perform analysis on a block. The values for the count rate, its error and the point length in nanos must
* exist, but additional values could also be present.
*
* @param block
* @return
*/
Values analyze(NumassBlock block, Meta config);
/**
* Analysis result for a point, including HV information
* @param point
* @param config
* @return
*/
default Values analyzePoint(NumassPoint point, Meta config) {
Map<String, Value> map = new HashMap<>(analyze(point, config).asMap());
map.put(HV_KEY, Value.of(point.getVoltage()));
return new ValueMap(map);
}
/**
* Return an unsorted stream of events, including events extracted from frames
*
* @param block
* @return
*/
Stream<NumassEvent> getEvents(NumassBlock block, Meta meta);
/**
* Analyze the whole set and return the results as a table
*
* @param set
* @param config
* @return
*/
Table analyzeSet(NumassSet set, Meta config);
/**
* Calculate the energy spectrum for a given block.
*
* @param block
* @param config
* @return
*/
default Table getSpectrum(NumassBlock block, Meta config) {
TableFormat format = new TableFormatBuilder()
.addNumber(CHANNEL_KEY, X_VALUE_KEY)
.addNumber(COUNT_KEY)
.addNumber(COUNT_RATE_KEY, Y_VALUE_KEY)
.addNumber(COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
.updateMeta(metaBuilder -> metaBuilder.setNode("config", config))
.build();
//optimized for fastest computation
//TODO requires additional performance optimization
AtomicLong[] spectrum = new AtomicLong[MAX_CHANNEL];
getEvents(block, config).forEach(event -> {
if (spectrum[event.getChanel()] == null) {
spectrum[event.getChanel()] = new AtomicLong(1);
} else {
spectrum[event.getChanel()].incrementAndGet();
}
});
double seconds = (double) block.getLength().toMillis() / 1000d;
return new ListTable.Builder(format)
.rows(IntStream.range(0, MAX_CHANNEL)
.filter(i -> spectrum[i] != null)
.mapToObj(i -> {
long value = spectrum[i].get();
return ValueMap.of(
format.namesAsArray(),
i,
value,
(double) value / seconds,
Math.sqrt(value) / seconds
);
})
).build();
}
/**
* Get the approximate number of events in the block. Not all analyzers support precise event counting.
*
* @param block
* @param config
* @return
*/
default long getCount(NumassBlock block, Meta config) {
return analyze(block, config).getValue(COUNT_KEY).numberValue().longValue();
}
/**
* Get the approximate effective point length in nanos. It does not necessarily correspond to the real point length.
*
* @param block
* @param config
* @return
*/
default long getLength(NumassBlock block, Meta config) {
return analyze(block, config).getValue(LENGTH_KEY).numberValue().longValue();
}
}
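The interface above packs the analysis into two entry points: analyze returns scalar values (count, count rate, its error, length), while the default getSpectrum builds a per-channel table where the count rate is N / t and its error is the Poisson estimate sqrt(N) / t; countInWindow then sums counts over a half-open channel window. A minimal usage sketch (not part of the commit) follows; the helper name and window bounds are hypothetical, and the import uses the package declared above, which may need adjusting since the scripts in this commit import NumassAnalyzer from inr.numass.data.analyzers.

import hep.dataforge.meta.Meta
import inr.numass.data.api.NumassAnalyzer
import inr.numass.data.api.NumassBlock

// Hypothetical helper: build the amplitude spectrum of a block with the default analyzer
// and sum the counts in the half-open channel window [loChannel, upChannel).
fun windowCount(block: NumassBlock, loChannel: Short, upChannel: Short): Long {
    val analyzer = NumassAnalyzer.DEFAULT_ANALYZER
    // table with CHANNEL_KEY, COUNT_KEY, COUNT_RATE_KEY = N / t and COUNT_RATE_ERROR_KEY = sqrt(N) / t
    val spectrum = analyzer.getSpectrum(block, Meta.empty())
    return NumassAnalyzer.countInWindow(spectrum, loChannel, upChannel)
}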

View File

@@ -6,6 +6,7 @@ import inr.numass.data.api.NumassBlock;
import inr.numass.data.api.NumassEvent;
import inr.numass.data.api.NumassFrame;
import inr.numass.data.api.NumassPoint;
import inr.numass.data.legacy.NumassFileEnvelope;
import org.jetbrains.annotations.NotNull;
import org.slf4j.LoggerFactory;
@@ -13,6 +14,7 @@ import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.ReadableByteChannel;
import java.nio.file.Path;
import java.time.Duration;
import java.time.Instant;
import java.util.Iterator;
@@ -23,6 +25,10 @@ import java.util.stream.StreamSupport;
* Created by darksnake on 08.07.2017.
*/
public class ClassicNumassPoint implements NumassPoint {
public static ClassicNumassPoint readFile(Path path) {
return new ClassicNumassPoint(NumassFileEnvelope.open(path, true));
}
private final Envelope envelope;
public ClassicNumassPoint(Envelope envelope) {

View File

@@ -81,7 +81,7 @@ public class ProtoNumassPoint implements NumassPoint {
@Override
public Duration getLength() {
return Duration.ofNanos((long) (getMeta().getInt("b_size") / getMeta().getInt("sample_freq") * 1e9));
return Duration.ofNanos((long) (getMeta().getDouble("params.b_size") / getMeta().getDouble("params.sample_freq") * 1e9));
}
@Override

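The getLength fix above changes two things: the block size and sampling frequency are now read from the params node of the metadata, and they are read as doubles rather than ints. With getInt the expression b_size / sample_freq is integer division, so any block shorter than one second collapses to zero nanoseconds. A standalone sketch (not part of the commit, with hypothetical numbers) illustrates the difference:

fun main() {
    val bSize = 4096              // samples per block (hypothetical value)
    val sampleFreq = 3_200_000    // samples per second (hypothetical value)

    // old behavior: Int / Int truncates to 0 before the scale to nanoseconds
    val truncatedNanos = (bSize / sampleFreq * 1e9).toLong()
    // new behavior: floating-point division keeps the fractional block length
    val properNanos = (bSize.toDouble() / sampleFreq * 1e9).toLong()

    println("integer division: $truncatedNanos ns, floating point: $properNanos ns") // 0 vs 1280000
}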
View File

@@ -169,9 +169,14 @@ class TimeAnalyzer @JvmOverloads constructor(private val processor: SignalProces
* @return
*/
fun getEventsWithDelay(block: NumassBlock, config: Meta): Stream<Pair<NumassEvent, Long>> {
val inverted = config.getBoolean("inverted",false)
return super.getEvents(block, config).asSequence().zipWithNext { prev, next ->
val delay = Math.max(next.timeOffset - prev.timeOffset, 0)
if(inverted){
Pair(next, delay)
} else {
Pair(prev, delay)
}
}.asStream()
}
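getEventsWithDelay pairs every event with the time gap to its neighbour: by default the gap to the next event is attached to the preceding event, and with the inverted option it is attached to the following one. Below is a minimal sketch (not part of the commit) of using those pairs to count events separated from their neighbour by more than some threshold, which appears to be the cut the t0 option applies internally; the helper name is hypothetical.

import hep.dataforge.meta.Meta
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.NumassBlock
import kotlin.streams.asSequence

// Hypothetical helper: count events whose gap to the neighbouring event exceeds t0 (in nanoseconds).
fun countWellSeparated(analyzer: TimeAnalyzer, block: NumassBlock, t0: Long): Int {
    val config = Meta.empty() // set "inverted" to true to attach each gap to the later event instead
    return analyzer.getEventsWithDelay(block, config)
            .asSequence()
            .count { (_, delay) -> delay > t0 }
}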

View File

@@ -19,7 +19,6 @@ import hep.dataforge.actions.ActionManager
import hep.dataforge.context.BasicPlugin
import hep.dataforge.context.Context
import hep.dataforge.context.PluginDef
import hep.dataforge.fx.plots.PlotContainer
import hep.dataforge.maths.MathPlugin
import hep.dataforge.meta.Meta
import hep.dataforge.plots.jfreechart.JFreeChartFrame
@@ -280,6 +279,6 @@ class NumassPlugin : BasicPlugin() {
fun displayJFreeChart(title: String, width: Double = 800.0, height: Double = 600.0, meta: Meta = Meta.empty()): JFreeChartFrame {
val frame = JFreeChartFrame(meta)
frame.configureValue("title", title)
PlotContainer.display(frame, title, width, height)
// PlotContainer.show(frame, title, width, height)
return frame
}

View File

@@ -8,6 +8,7 @@ import org.apache.commons.math3.random.RandomGenerator
import java.time.Duration
import java.time.Instant
import java.util.*
import kotlin.collections.ArrayList
interface ChainGenerator {
@@ -30,27 +31,84 @@ interface ChainGenerator {
}
}
private fun RandomGenerator.nextExp(mean: Double): Double {
return -mean * Math.log(1 - nextDouble())
}
private fun RandomGenerator.nextDeltaTime(cr: Double): Long {
return (nextExp(1.0 / cr) * 1e9).toLong()
}
class SimpleChainGenerator(
val cr: Double,
private val rnd: RandomGenerator = JDKRandomGenerator(),
private val amp: (Long) -> Short = { 1 }
private val amp: RandomGenerator.(NumassEvent?, Long) -> Short = { _, _ -> ((nextDouble() + 2.0) * 100).toShort() }
) : ChainGenerator {
override fun next(event: NumassEvent?): NumassEvent {
return if (event == null) {
NumassEvent(amp(0), Instant.EPOCH, 0)
NumassEvent(rnd.amp(null, 0), Instant.EPOCH, 0)
} else {
val deltaT = generateDeltaTime()
NumassEvent(amp(deltaT), event.blockTime, event.timeOffset + deltaT)
val deltaT = rnd.nextDeltaTime(cr)
NumassEvent(rnd.amp(event, deltaT), event.blockTime, event.timeOffset + deltaT)
}
}
private fun nextExpDecay(mean: Double): Double {
return -mean * Math.log(1 - rnd.nextDouble())
}
private fun generateDeltaTime(): Long {
return (nextExpDecay(1.0 / cr) * 1e9).toLong()
}
}
class BunchGenerator(
private val cr: Double,
private val bunchRate: Double,
private val bunchLength: RandomGenerator.() -> Long,
private val rnd: RandomGenerator = JDKRandomGenerator(),
private val amp: RandomGenerator.(NumassEvent?, Long) -> Short = { _, _ -> ((nextDouble() + 2.0) * 100).toShort() }
) : ChainGenerator {
private val internalGenerator = SimpleChainGenerator(cr, rnd, amp)
var bunchStart: Long = 0
var bunchEnd: Long = 0
override fun next(event: NumassEvent?): NumassEvent {
if ((event?.timeOffset ?: 0) >= bunchEnd) {
bunchStart = bunchEnd + rnd.nextExp(bunchRate).toLong()
bunchEnd = bunchStart + rnd.bunchLength()
return NumassEvent(rnd.amp(null, 0), Instant.EPOCH, bunchStart)
} else {
return internalGenerator.next(event)
}
}
override fun generateBlock(start: Instant, length: Long, filter: (NumassEvent, NumassEvent) -> Boolean): NumassBlock {
bunchStart = 0
bunchEnd = 0
return super.generateBlock(start, length, filter)
}
}
class MergingGenerator(private vararg val generators: ChainGenerator) : ChainGenerator {
private val waiting: TreeSet<Pair<ChainGenerator, NumassEvent>> = TreeSet(Comparator.comparing<Pair<ChainGenerator, NumassEvent>, Long> { it.second.timeOffset })
init {
generators.forEach { generator ->
waiting.add(Pair(generator, generator.next(null)))
}
}
override fun next(event: NumassEvent?): NumassEvent {
val pair = waiting.first()
waiting.remove(pair)
waiting.add(Pair(pair.first, pair.first.next(pair.second)))
return pair.second
}
override fun generateBlock(start: Instant, length: Long, filter: (NumassEvent, NumassEvent) -> Boolean): NumassBlock {
generators.forEach { generator ->
waiting.add(Pair(generator, generator.next(null)))
}
return super.generateBlock(start, length, filter)
}
}
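The generators above draw waiting times by inverse-transform sampling: nextExp maps a uniform variate U to -mean * ln(1 - U), which is exponentially distributed with the given mean, and nextDeltaTime(cr) converts that into Poisson-process gaps with rate cr, scaled to nanoseconds. A standalone sanity check of that identity (not part of the commit, with a hypothetical rate):

import org.apache.commons.math3.random.JDKRandomGenerator

fun main() {
    val rnd = JDKRandomGenerator()
    val cr = 10.0        // count rate in Hz (hypothetical value)
    val mean = 1.0 / cr  // expected gap between events, in seconds

    // draw a large sample with the same formula as nextExp and compare its mean to 1 / cr
    val sampleMean = (1..1_000_000)
            .map { -mean * Math.log(1 - rnd.nextDouble()) }
            .average()

    println("expected mean gap: $mean s, sampled mean gap: $sampleMean s")
}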

View File

@@ -0,0 +1,46 @@
package inr.numass.scripts
import hep.dataforge.fx.plots.PlotManager
import hep.dataforge.kodex.buildContext
import hep.dataforge.kodex.buildMeta
import inr.numass.NumassPlugin
import inr.numass.data.BunchGenerator
import inr.numass.data.MergingGenerator
import inr.numass.data.SimpleChainGenerator
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.analyzers.SmartAnalyzer
import inr.numass.data.api.SimpleNumassPoint
import java.time.Instant
fun main(args: Array<String>) {
val context = buildContext("NUMASS", NumassPlugin::class.java, PlotManager::class.java)
val cr = 10.0
val length = 30e9.toLong()
val num = 50;
val dt = 6.5
val regularGenerator = SimpleChainGenerator(cr)
val bunchGenerator = BunchGenerator(40.0, 0.1, { 2e9.toLong() })
val generator = MergingGenerator(regularGenerator, bunchGenerator)
val blocks = (1..num).map {
generator.generateBlock(Instant.now().plusNanos(it * length), length)
}
val point = SimpleNumassPoint(10000.0, blocks)
val meta = buildMeta {
"t0.crFraction" to 0.1
}
println("actual count rate: ${point.events.count().toDouble() / point.length.seconds}")
val res = SmartAnalyzer().analyze(point,meta)
.getDouble(NumassAnalyzer.COUNT_RATE_KEY)
println("estimated count rate: ${res}")
}

View File

@@ -16,77 +16,72 @@
package inr.numass.scripts
import hep.dataforge.description.DescriptorUtils
import hep.dataforge.fx.plots.PlotManager
import hep.dataforge.kodex.buildContext
import hep.dataforge.kodex.buildMeta
import hep.dataforge.kodex.replaceColumn
import hep.dataforge.plots.data.DataPlot
import hep.dataforge.tables.Table
import inr.numass.NumassPlugin
import inr.numass.data.NumassDataUtils
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.analyzers.getAmplitudeSpectrum
import inr.numass.data.api.NumassPoint
import inr.numass.data.analyzers.SmartAnalyzer
import inr.numass.data.api.NumassSet
import inr.numass.data.storage.NumassStorageFactory
fun main(args: Array<String>) {
val context = buildContext("NUMASS", NumassPlugin::class.java, PlotManager::class.java) {
rootDir = "D:\\Work\\Numass\\sterile\\2017_05"
dataDir = "D:\\Work\\Numass\\data\\2017_05"
rootDir = "D:\\Work\\Numass\\sterile\\2017_11"
dataDir = "D:\\Work\\Numass\\data\\2017_11"
}
//val rootDir = File("D:\\Work\\Numass\\data\\2017_05\\Fill_2")
val storage = NumassStorageFactory.buildLocal(context, "Fill_2", true, false);
val sets = (2..14).map { "set_$it" }
val sets = (1..24).map { "set_$it" }
val loaders = sets.mapNotNull { set ->
storage.provide("loader::$set", NumassSet::class.java).orElse(null)
}
val analyzer = SmartAnalyzer()
val all = NumassDataUtils.join("sum", loaders)
val t0 = 20e3
val meta = buildMeta {
"t0" to 30e3
"inverted" to true
"window.lo" to 400
"window.up" to 1800
"t0" to t0
}
val filter: (NumassPoint) -> Table = { point ->
val analyzer = TimeAnalyzer()
val sequence = analyzer.zipEvents(point, meta).filter { it.second.timeOffset - it.first.timeOffset > t0 }.map { it.second }
//val sequence = analyzer.getEvents(point,meta).asSequence()
getAmplitudeSpectrum(sequence, point.length.toMillis().toDouble() / 1000.0, meta)
"window.up" to 1600
}
val plots = context.getFeature(PlotManager::class.java)
val frame = plots.getPlotFrame("differential")
val frame = plots.getPlotFrame("differential").apply {
this.plots.descriptor = DescriptorUtils.buildDescriptor(DataPlot::class)
this.plots.configureValue("showLine", true)
}
val integralFrame = plots.getPlotFrame("integral")
for (hv in arrayOf(14000.0, 14500.0, 15000.0, 15500.0)) {
val basePoint = all.optPoint(hv).get()
for (hv in arrayOf(14000.0, 14500.0, 15000.0, 15500.0, 16050.0)) {
val point1 = all.optPoint(hv).get()
val subPoint = all.optPoint(hv + 200.0).get()
val point0 = all.optPoint(hv + 200.0).get()
with(NumassAnalyzer) {
val baseSpectrum = filter(basePoint).withBinning(50)
val spectrum1 = analyzer.getAmplitudeSpectrum(point1, meta).withBinning(50)
val subSpectrum = filter(subPoint).withBinning(50)
val spectrum0 = analyzer.getAmplitudeSpectrum(point0, meta).withBinning(50)
val res = subtractAmplitudeSpectrum(baseSpectrum, subSpectrum)
val res = subtractAmplitudeSpectrum(spectrum1, spectrum0)
val norm = res.getColumn(COUNT_RATE_KEY).stream().mapToDouble { it.doubleValue() }.sum()
integralFrame.add(DataPlot.plot("point_$hv", AMPLITUDE_ADAPTER, baseSpectrum))
integralFrame.add(DataPlot.plot("point_$hv", AMPLITUDE_ADAPTER, spectrum0))
frame.add(DataPlot.plot("point_$hv", AMPLITUDE_ADAPTER, res.replaceColumn(COUNT_RATE_KEY) { getDouble(COUNT_RATE_KEY) / norm }))
}

View File

@@ -16,108 +16,66 @@
package inr.numass.scripts
import hep.dataforge.description.DescriptorUtils
import hep.dataforge.fx.plots.PlotManager
import hep.dataforge.kodex.buildContext
import hep.dataforge.kodex.buildMeta
import hep.dataforge.kodex.replaceColumn
import hep.dataforge.meta.Meta
import hep.dataforge.plots.PlotPlugin
import hep.dataforge.plots.data.DataPlot
import inr.numass.NumassPlugin
import inr.numass.data.NumassDataUtils
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.analyzers.getAmplitudeSpectrum
import inr.numass.data.analyzers.NumassAnalyzer.Companion.AMPLITUDE_ADAPTER
import inr.numass.data.analyzers.NumassAnalyzer.Companion.withBinning
import inr.numass.data.analyzers.SmartAnalyzer
import inr.numass.data.api.NumassSet
import inr.numass.data.storage.NumassStorageFactory
import kotlin.streams.asSequence
fun main(args: Array<String>) {
val context = buildContext("NUMASS", NumassPlugin::class.java, PlotManager::class.java){
rootDir = "D:\\Work\\Numass\\sterile2017_05"
val context = buildContext("NUMASS", NumassPlugin::class.java, PlotManager::class.java) {
rootDir = "D:\\Work\\Numass\\sterile\\2017_11"
dataDir = "D:\\Work\\Numass\\data\\2017_11"
}
//val rootDir = File("D:\\Work\\Numass\\data\\2017_05\\Fill_2")
val storage = NumassStorageFactory.buildLocal(context, "D:\\Work\\Numass\\data\\2017_05\\Fill_2", true, false);
val storage = NumassStorageFactory.buildLocal(context, "Fill_2", true, false);
val sets = (2..14).map { "set_$it" }
val sets = (10..24).map { "set_$it" }
val loaders = sets.mapNotNull { set ->
storage.provide("loader::$set", NumassSet::class.java).orElse(null)
}
val all = NumassDataUtils.join("sum", loaders)
val point = all.optPoint(14000.0).get()
val t0 = 20e3.toLong()
val analyzer = TimeAnalyzer()
val seconds = point.length.toMillis().toDouble() / 1000.0
val binning = 20
val set = NumassDataUtils.join("sum", loaders)
val plots = context.getFeature(PlotPlugin::class.java);
val analyzer = SmartAnalyzer()
val meta = buildMeta {
node("window"){
"lo" to 300
"up" to 2600
}
// "t0" to 30e3
// "inverted" to true
"window.lo" to 400
"window.up" to 1600
}
with(NumassAnalyzer) {
val events = getAmplitudeSpectrum(analyzer.getEvents(point).asSequence(), seconds, meta)
.withBinning(binning)
val metaForChain = meta.builder.setValue("t0", 15e3)
val eventsNorming = events.getColumn(COUNT_RATE_KEY).stream().mapToDouble{it.doubleValue()}.sum()
println("The norming factor for unfiltered count rate is $eventsNorming")
val filtered = getAmplitudeSpectrum(
analyzer.zipEvents(point, Meta.empty()).filter { it.second.timeOffset - it.first.timeOffset > t0 }.map { it.second },
seconds,
meta
).withBinning(binning)
val filteredNorming = filtered.getColumn(COUNT_RATE_KEY).stream().mapToDouble{it.doubleValue()}.sum()
println("The norming factor for filtered count rate is $filteredNorming")
val defaultFiltered = getAmplitudeSpectrum(
analyzer.getEvents(point, buildMeta {"t0" to t0}).asSequence(),
seconds,
meta
).withBinning(binning)
val defaultFilteredNorming = defaultFiltered.getColumn(COUNT_RATE_KEY).stream().mapToDouble{it.doubleValue()}.sum()
println("The norming factor for default filtered count rate is $defaultFilteredNorming")
val metaForChainInverted = metaForChain.builder.setValue("inverted", true)
plots.getPlotFrame("amps").apply {
add(DataPlot.plot("events", AMPLITUDE_ADAPTER, events.replaceColumn(COUNT_RATE_KEY){getDouble(COUNT_RATE_KEY)/eventsNorming}))
add(DataPlot.plot("filtered", AMPLITUDE_ADAPTER, filtered.replaceColumn(COUNT_RATE_KEY){getDouble(COUNT_RATE_KEY)/filteredNorming}))
add(DataPlot.plot("defaultFiltered", AMPLITUDE_ADAPTER, defaultFiltered.replaceColumn(COUNT_RATE_KEY){getDouble(COUNT_RATE_KEY)/defaultFilteredNorming}))
val plots = context.getFeature(PlotManager::class.java)
for (hv in arrayOf(14000.0, 14500.0, 15000.0, 15500.0, 16050.0)) {
val frame = plots.getPlotFrame("integral[$hv]").apply {
this.plots.descriptor = DescriptorUtils.buildDescriptor(DataPlot::class)
this.plots.configureValue("showLine", true)
}
// plots.getPlotFrame("ratio").apply {
//
// add(
// DataPlot.plot(
// "ratio",
// Adapters.DEFAULT_XY_ADAPTER,
// events.zip(filtered) { f, s ->
// Adapters.buildXYDataPoint(f.getDouble(CHANNEL_KEY), f.getDouble(COUNT_RATE_KEY) / s.getDouble(COUNT_RATE_KEY))
// }
// )
// )
// }
val point = set.optPoint(hv).get()
frame.add(DataPlot.plot("raw", AMPLITUDE_ADAPTER, analyzer.getAmplitudeSpectrum(point, meta).withBinning(20)))
frame.add(DataPlot.plot("filtered", AMPLITUDE_ADAPTER, analyzer.getAmplitudeSpectrum(point, metaForChain).withBinning(20)))
frame.add(DataPlot.plot("invertedFilter", AMPLITUDE_ADAPTER, analyzer.getAmplitudeSpectrum(point, metaForChainInverted).withBinning(20)))
}
}

View File

@@ -0,0 +1,61 @@
/*
* Copyright 2017 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.scripts
import hep.dataforge.description.DescriptorUtils
import hep.dataforge.fx.plots.PlotManager
import hep.dataforge.kodex.buildContext
import hep.dataforge.kodex.buildMeta
import hep.dataforge.plots.data.DataPlot
import inr.numass.NumassPlugin
import inr.numass.data.analyzers.NumassAnalyzer.Companion.AMPLITUDE_ADAPTER
import inr.numass.data.analyzers.NumassAnalyzer.Companion.withBinning
import inr.numass.data.analyzers.SmartAnalyzer
import inr.numass.data.storage.ProtoNumassPoint
import java.nio.file.Paths
fun main(args: Array<String>) {
val context = buildContext("NUMASS", NumassPlugin::class.java, PlotManager::class.java)
val analyzer = SmartAnalyzer()
val meta = buildMeta {
"window.lo" to 800
"window.up" to 5600
}
val metaForChain = meta.builder.setValue("t0", 15e3)
val metaForChainInverted = metaForChain.builder.setValue("inverted", true)
val plots = context.getFeature(PlotManager::class.java)
val point = ProtoNumassPoint.readFile(Paths.get("D:\\Work\\Numass\\data\\2017_05_frames\\Fill_3_events\\set_33\\p36(30s)(HV1=17000).df"))
val frame = plots.getPlotFrame("integral").apply {
this.plots.descriptor = DescriptorUtils.buildDescriptor(DataPlot::class)
this.plots.configureValue("showLine", true)
}
frame.add(DataPlot.plot("raw", AMPLITUDE_ADAPTER, analyzer.getAmplitudeSpectrum(point, meta).withBinning(80)))
frame.add(DataPlot.plot("filtered", AMPLITUDE_ADAPTER, analyzer.getAmplitudeSpectrum(point, metaForChain).withBinning(80)))
frame.add(DataPlot.plot("invertedFilter", AMPLITUDE_ADAPTER, analyzer.getAmplitudeSpectrum(point, metaForChainInverted).withBinning(80)))
}