Faster file type inference

Alexander Nozik 2018-08-15 18:53:05 +03:00
parent 7ad1ef6484
commit 2af3659d3c
3 changed files with 18 additions and 21 deletions

View File

@@ -103,14 +103,12 @@ class NumassEnvelopeType : EnvelopeType {
         return try {
             FileChannel.open(path, StandardOpenOption.READ).use {
                 val buffer = it.map(FileChannel.MapMode.READ_ONLY, 0, 6)
-                val array = ByteArray(6)
-                buffer.get(array)
-                val header = String(array)
                 when {
                     //TODO use templates from appropriate types
-                    header.startsWith("#!") -> NumassEnvelopeType.INSTANCE
-                    header.startsWith("#~DFTL") -> TaglessEnvelopeType.INSTANCE
-                    header.startsWith("#~") -> DefaultEnvelopeType.INSTANCE
+                    buffer.get(0) == '#'.toByte() && buffer.get(1) == '!'.toByte() -> NumassEnvelopeType.INSTANCE
+                    buffer.get(0) == '#'.toByte() && buffer.get(1) == '!'.toByte() &&
+                            buffer.get(4) == 'T'.toByte() && buffer.get(5) == 'L'.toByte() -> TaglessEnvelopeType.INSTANCE
+                    buffer.get(0) == '#'.toByte() && buffer.get(1) == '~'.toByte() -> DefaultEnvelopeType.INSTANCE
                     else -> null
                 }
             }

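The gist of this hunk: instead of copying the first six bytes into a `ByteArray` and wrapping them in a `String`, the memory-mapped buffer is inspected byte by byte. A minimal self-contained sketch of that idea follows; `EnvelopeKind` and `inferEnvelopeKind` are hypothetical stand-ins for the project's `EnvelopeType` singletons, and the byte checks are derived from the original `"#!"`, `"#~DFTL"` and `"#~"` prefixes rather than copied verbatim from the committed branches.

```kotlin
import java.nio.channels.FileChannel
import java.nio.file.Path
import java.nio.file.StandardOpenOption

// Hypothetical stand-ins for the project's EnvelopeType singletons.
enum class EnvelopeKind { NUMASS, TAGLESS, DEFAULT }

// Inspect the first six bytes of a file through a read-only mapped buffer,
// without building an intermediate String. Returns null for unknown headers
// or files that cannot be read.
fun inferEnvelopeKind(path: Path): EnvelopeKind? = try {
    FileChannel.open(path, StandardOpenOption.READ).use { channel ->
        val buffer = channel.map(FileChannel.MapMode.READ_ONLY, 0, 6)
        when {
            // "#!" -> numass envelope
            buffer.get(0) == '#'.code.toByte() && buffer.get(1) == '!'.code.toByte() -> EnvelopeKind.NUMASS
            // "#~DFTL" -> tagless envelope; must come before the plain "#~" case
            buffer.get(0) == '#'.code.toByte() && buffer.get(1) == '~'.code.toByte() &&
                    buffer.get(4) == 'T'.code.toByte() && buffer.get(5) == 'L'.code.toByte() -> EnvelopeKind.TAGLESS
            // "#~" -> default envelope
            buffer.get(0) == '#'.code.toByte() && buffer.get(1) == '~'.code.toByte() -> EnvelopeKind.DEFAULT
            else -> null
        }
    }
} catch (ex: Exception) {
    // File too short to map six bytes, or not readable at all.
    null
}
```

Branch order matters here: a Kotlin `when` takes the first matching branch, so the more specific tagless check has to precede the generic `"#~"` check.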
View File

@@ -61,13 +61,13 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
         val chunkSize = config.getInt("chunkSize", -1)
-        val res = if(chunkSize>0) {
+        val res = if (chunkSize > 0) {
             getEventsWithDelay(block, config)
                     .chunked(chunkSize) { analyzeSequence(it.asSequence(), t0) }
                     .toList()
                     .mean(config.getEnum("mean", WEIGHTED))
-        } else{
-            analyzeSequence(getEventsWithDelay(block, config),t0)
+        } else {
+            analyzeSequence(getEventsWithDelay(block, config), t0)
         }
         return ValueMap.Builder(res)
@@ -126,7 +126,7 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
      */
     private fun List<Values>.mean(method: AveragingMethod): Values {
-        if(this.isEmpty()){
+        if (this.isEmpty()) {
             return ValueMap.Builder()
                     .putValue(LENGTH_KEY, 0)
                     .putValue(COUNT_KEY, 0)

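The chunked-averaging pattern in the first hunk above can be sketched on its own with plain doubles standing in for the analyzer's `Values`/`ValueMap` results; `analyzeChunked` and `analyzeChunk` are hypothetical names, and the real code combines chunk results with a configurable (weighted) mean rather than a plain average.

```kotlin
// Split a lazy event sequence into fixed-size chunks, analyze each chunk,
// then combine the per-chunk results.
fun analyzeChunked(events: Sequence<Double>, chunkSize: Int): Double {
    // Per-chunk "analysis" placeholder: here simply the chunk mean.
    fun analyzeChunk(chunk: List<Double>): Double = chunk.average()

    return if (chunkSize > 0) {
        events
                .chunked(chunkSize) { analyzeChunk(it) } // analyze each chunk lazily
                .toList()
                .average()                               // combine chunk results
    } else {
        analyzeChunk(events.toList())                    // no chunking: analyze everything at once
    }
}

fun main() {
    val events = generateSequence(1.0) { it + 1.0 }.take(10_000)
    println(analyzeChunked(events, chunkSize = 1000)) // 5000.5
}
```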
View File

@@ -8,7 +8,6 @@ import hep.dataforge.plots.jfreechart.JFreeChartPlugin
 import inr.numass.NumassPlugin
 import inr.numass.actions.TimeAnalyzerAction
 import inr.numass.data.NumassDataUtils
-import inr.numass.data.analyzers.TimeAnalyzer
 import inr.numass.data.api.NumassPoint
 import inr.numass.data.api.NumassSet
 import inr.numass.data.api.SimpleNumassPoint
@@ -19,38 +18,38 @@ fun main(args: Array<String>) {
     val context = buildContext("NUMASS", NumassPlugin::class.java, JFreeChartPlugin::class.java) {
         output = FXOutputManager()
-        rootDir = "D:\\Work\\Numass\\sterile2018_04"
-        dataDir = "D:\\Work\\Numass\\data\\2018_04"
+        rootDir = "D:\\Work\\Numass\\sterile2017_11"
+        dataDir = "D:\\Work\\Numass\\data\\2017_11"
     }
-    val storage = NumassDirectory.read(context, "Fill_4")!!
+    val storage = NumassDirectory.read(context, "Fill_2")!!
     val meta = buildMeta {
         "t0" to 3000
         "binNum" to 200
-        "t0Step" to 100
-        "chunkSize" to 3000
-        "mean" to TimeAnalyzer.AveragingMethod.ARITHMETIC
+        "t0Step" to 600
+        //"chunkSize" to 3000
+        //"mean" to TimeAnalyzer.AveragingMethod.ARITHMETIC
         //"separateParallelBlocks" to true
         "window" to {
-            "lo" to 0
+            "lo" to 400
             "up" to 4000
         }
         //"plot.showErrors" to false
     }
     //def sets = ((2..14) + (22..31)).collect { "set_$it" }
-    val sets = (2..12).map { "set_$it" }
+    val sets = (22..22).map { "set_$it" }
     //def sets = (16..31).collect { "set_$it" }
     //def sets = (20..28).collect { "set_$it" }
     val loaders = sets.map { set ->
-        storage.provide("loader::$set", NumassSet::class.java).orElse(null)
+        storage.provide(set, NumassSet::class.java).orElse(null)
     }.filter { it != null }
     val all = NumassDataUtils.join("sum", loaders)
-    val hvs = listOf(12000.0, 14000.0, 16000.0)//, 15000d, 15200d, 15400d, 15600d, 15800d]
+    val hvs = listOf(14000.0, 15000.0)//, 15000d, 15200d, 15400d, 15600d, 15800d]
     //listOf(18500.0, 18600.0, 18995.0, 19000.0)
     val data = DataSet.edit(NumassPoint::class).apply {
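The loader-collection step in this script (map each set name to an `Optional`, then drop the misses) can also be written with `mapNotNull`. A small sketch of that pattern, with a hypothetical `FakeStorage` standing in for the directory storage used above:

```kotlin
import java.util.Optional

// Hypothetical stand-in for the storage used in the script: provide() returns
// an Optional that is empty when the requested set does not exist.
class FakeStorage(private val sets: Map<String, String>) {
    fun provide(name: String): Optional<String> = Optional.ofNullable(sets[name])
}

fun main() {
    val storage = FakeStorage(mapOf("set_22" to "loader-22"))
    val names = (22..22).map { "set_$it" }

    // Same pattern as in the script: look each set up and drop the missing ones.
    // mapNotNull is the idiomatic equivalent of map { ... } followed by filter { it != null }.
    val loaders = names.mapNotNull { storage.provide(it).orElse(null) }
    println(loaders) // [loader-22]
}
```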