Viewer redone

This commit is contained in:
Alexander Nozik 2017-04-15 22:15:53 +03:00
parent 30c912bd3a
commit 9a1de9b6f2
116 changed files with 5806 additions and 1996 deletions

View File

@ -11,3 +11,5 @@ private/*
.nb-gradle/*
.idea/
*.iml
/numass-core/gen/

BIN
gradle/wrapper/gradle-wrapper.jar vendored Normal file

Binary file not shown.

View File

@ -0,0 +1,6 @@
#Wed Apr 05 18:15:59 MSK 2017
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-3.4.1-all.zip

172
gradlew vendored Normal file
View File

@ -0,0 +1,172 @@
#!/usr/bin/env sh
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
# Remember the caller's directory, resolve the physical app home, then return.
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
# Names used in JVM properties and diagnostic messages.
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
# warn <message...> - print a message to stdout.
warn ( ) {
echo "$*"
}
# die <message...> - print a message and abort the script with exit status 1.
die ( ) {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
# The wrapper bootstrap jar is resolved relative to the app home.
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
# Re-assemble the (possibly converted) arguments back into the positional parameters.
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
# save - print each argument single-quoted (embedded quotes escaped) and
# terminated by " \" so the output can be safely re-read by 'eval set --'.
save ( ) {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=$(save "$@")
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
cd "$(dirname "$0")"
fi
# Hand control to the JVM; exec replaces this shell and never returns.
exec "$JAVACMD" "$@"

84
gradlew.bat vendored Normal file
View File

@ -0,0 +1,84 @@
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
@rem Resolve the directory this script lives in; it doubles as the app home.
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
@rem Probe the PATH-resolved java.exe; ERRORLEVEL 0 means it is usable.
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
@rem Strip any surrounding quotes from JAVA_HOME before composing the path.
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

View File

@ -7,7 +7,7 @@ if (!hasProperty('mainClass')) {
mainClassName = mainClass
dependencies {
compile project(':numass-storage')
compile project(':numass-core')
compile 'commons-cli:commons-cli:1.3.1'
compile 'org.zeroturnaround:zt-zip:1.9'
}

View File

@ -6,7 +6,7 @@ configurations {
}
dependencies {
compile project(':numass-storage:numass-client')
compile project(':numass-client')
compile "hep.dataforge:plots-jfc" // project(':dataforge-plots:plots-jfc')
compile "hep.dataforge:dataforge-control" //project(':dataforge-control')
compile "hep.dataforge:dataforge-fx" //project(':dataforge-fx')

View File

@ -9,6 +9,8 @@ buildscript {
apply plugin: 'com.google.protobuf'
description = "A bse package with minimal dependencies for numass"
dependencies {
compile "hep.dataforge:dataforge-storage" //project(':dataforge-storage')

File diff suppressed because it is too large Load Diff

View File

@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.storage;
package inr.numass.data;
/**
*

View File

@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.storage;
package inr.numass.data;
import hep.dataforge.description.ValueDef;
import hep.dataforge.meta.Meta;
@ -42,7 +42,7 @@ public class NMFile extends NamedMetaHolder implements NumassData {
super(file.getName(), file.meta());
points = new ArrayList<>();
for (RawNMPoint point : file.getData()) {
points.add(new NMPoint(point));
points.add(PointBuilders.readRawPoint(point));
}
}

View File

@ -13,14 +13,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.storage;
package inr.numass.data;
import hep.dataforge.tables.DataPoint;
import hep.dataforge.tables.MapPoint;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.IntStream;
@ -43,34 +42,6 @@ public class NMPoint implements NumassPoint {
this.eventsCount = IntStream.of(spectrum).sum();
}
public NMPoint(RawNMPoint point) {
if (point == null) {
throw new IllegalArgumentException();
}
this.pointLength = point.getLength();
this.u = point.getUset();
// this.uread = point.getUread();
this.startTime = point.getStartTime();
this.eventsCount = point.getEventsCount();
spectrum = calculateSpectrum(point);
}
private int[] calculateSpectrum(RawNMPoint point) {
assert point.getEventsCount() > 0;
int[] result = new int[RawNMPoint.MAX_CHANEL + 1];
Arrays.fill(result, 0);
point.getEvents().stream().forEach((event) -> {
if (event.getChanel() >= RawNMPoint.MAX_CHANEL) {
result[RawNMPoint.MAX_CHANEL]++;
} else {
result[event.getChanel()]++;
}
});
return result;
}
/**
* @return the absouteTime
*/
@ -105,7 +76,6 @@ public class NMPoint implements NumassPoint {
List<DataPoint> data = new ArrayList<>();
for (int i = 0; i < RawNMPoint.MAX_CHANEL; i++) {
data.add(new MapPoint(dataNames, i, spectrum[i]));
}
return data;
}
@ -121,13 +91,6 @@ public class NMPoint implements NumassPoint {
}
/**
* @return the overflow
*/
public int getOverflow() {
return spectrum[RawNMPoint.MAX_CHANEL];
}
/**
* @return the pointLength
*/

View File

@ -3,7 +3,7 @@
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package inr.numass.storage;
package inr.numass.data;
import hep.dataforge.meta.Annotated;
import hep.dataforge.meta.Meta;

View File

@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.storage;
package inr.numass.data;
import hep.dataforge.data.FileDataFactory;
import hep.dataforge.data.binary.Binary;

View File

@ -0,0 +1,134 @@
package inr.numass.data;
import hep.dataforge.tables.DataPoint;
import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.Table;
import java.time.Instant;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* Created by darksnake on 30-Jan-17.
*/
/**
 * Static utilities for combining, correcting and converting Numass spectra.
 * <p>
 * Created by darksnake on 30-Jan-17.
 */
public class NumassDataUtils {

    /**
     * Merge several data sets into one spectrum: points taken at the same set
     * voltage are summed channel-by-channel, all other points pass through as-is.
     *
     * @param spectra the data sets to merge
     * @return merged points, ordered by the first appearance of each voltage
     */
    public static Collection<NumassPoint> joinSpectra(Stream<NumassData> spectra) {
        Map<Double, NumassPoint> map = new LinkedHashMap<>();
        spectra.forEach(datum ->
                datum.forEach(point ->
                        // Map.merge replaces the manual containsKey/put dance;
                        // the argument order mirrors the original join(newPoint, existing)
                        map.merge(point.getVoltage(), point, (existing, incoming) -> join(incoming, existing))
                )
        );
        return map.values();
    }

    /**
     * Spectral sum of two points taken at the same voltage.
     *
     * @param first  the first point
     * @param second the second point
     * @return a new point with channel-wise summed spectrum and summed length
     * @throws RuntimeException if the voltages of the two points differ
     */
    public static NumassPoint join(NumassPoint first, NumassPoint second) {
        if (first.getVoltage() != second.getVoltage()) {
            throw new RuntimeException("Voltage mismatch");
        }
        // hoist the spectra so the accessors are not re-invoked for every channel
        int[] firstSpectrum = first.getSpectrum();
        int[] secondSpectrum = second.getSpectrum();
        int[] newArray = new int[firstSpectrum.length];
        Arrays.setAll(newArray, i -> firstSpectrum[i] + secondSpectrum[i]);
        return new NMPoint(
                first.getVoltage(),
                Instant.EPOCH, // a merged point has no single meaningful start time
                first.getLength() + second.getLength(),
                newArray
        );
    }

    /**
     * Channel-wise difference of a point and a reference point, clamped at zero
     * (negative counts are not physical).
     *
     * @param point     the point to correct
     * @param reference the reference point to subtract
     * @return a new point with the subtracted spectrum
     */
    public static NumassPoint substractPoint(NumassPoint point, NumassPoint reference) {
        int[] pointSpectrum = point.getSpectrum();
        int[] referenceSpectrum = reference.getSpectrum();
        int[] array = new int[pointSpectrum.length];
        Arrays.setAll(array, i -> Math.max(0, pointSpectrum[i] - referenceSpectrum[i]));
        return new NMPoint(
                point.getVoltage(),
                point.getStartTime(),
                point.getLength(),
                array
        );
    }

    /**
     * Subtract the point taken at the given set voltage from every point of the
     * collection.
     *
     * @param points the points to correct
     * @param uset   the set voltage identifying the reference point
     * @return corrected points
     * @throws RuntimeException if no point with the given voltage is present
     */
    public static Collection<NumassPoint> substractReferencePoint(Collection<NumassPoint> points, double uset) {
        NumassPoint reference = points.stream().filter(it -> it.getVoltage() == uset).findFirst()
                .orElseThrow(() -> new RuntimeException("Reference point not found"));
        return points.stream().map(it -> substractPoint(it, reference)).collect(Collectors.toList());
    }

    /**
     * High voltage scale correction: rescale the X (voltage) column by
     * {@code 1 + beta}.
     *
     * @param data the spectrum table
     * @param beta the relative scale correction
     * @return the corrected table
     */
    public static Table setHVScale(ListTable data, double beta) {
        SpectrumDataAdapter reader = adapter();
        ListTable.Builder res = new ListTable.Builder(data.getFormat());
        double corrFactor = 1 + beta; // loop-invariant, hoisted out of the loop
        for (DataPoint dp : data) {
            res.row(reader.buildSpectrumDataPoint(reader.getX(dp).doubleValue() * corrFactor, reader.getCount(dp), reader.getTime(dp)));
        }
        return res.build();
    }

    /**
     * @return an adapter with the default column names used by Numass spectra
     */
    public static SpectrumDataAdapter adapter() {
        return new SpectrumDataAdapter("Uset", "CR", "CRerr", "Time");
    }

    /**
     * Dead time correction using the default adapter.
     *
     * @param data  the spectrum table
     * @param dtime the dead time in seconds
     * @return the corrected table
     */
    public static Table correctForDeadTime(ListTable data, double dtime) {
        return correctForDeadTime(data, adapter(), dtime);
    }

    /**
     * Dead time correction (dead time given in seconds): each count is scaled
     * by {@code 1 / (1 - dtime * countRate)}.
     *
     * @param data    the spectrum table
     * @param adapter the adapter describing the table columns
     * @param dtime   the dead time in seconds
     * @return the corrected table
     */
    public static Table correctForDeadTime(ListTable data, SpectrumDataAdapter adapter, double dtime) {
        ListTable.Builder res = new ListTable.Builder(data.getFormat());
        for (DataPoint dp : data) {
            double corrFactor = 1 / (1 - dtime * adapter.getCount(dp) / adapter.getTime(dp));
            res.row(adapter.buildSpectrumDataPoint(adapter.getX(dp).doubleValue(), (long) (adapter.getCount(dp) * corrFactor), adapter.getTime(dp)));
        }
        return res.build();
    }

    /**
     * Dead-time-corrected count rate in the channel window {@code [from, to]}.
     *
     * @param p        the point
     * @param from     the lower channel of the window
     * @param to       the upper channel of the window
     * @param deadTime the dead time in seconds; non-positive disables the correction
     * @return the corrected count rate
     */
    public static double countRateWithDeadTime(NumassPoint p, int from, int to, double deadTime) {
        double wind = p.getCountInWindow(from, to) / p.getLength();
        double res;
        if (deadTime > 0) {
            double total = p.getTotalCount();
            // Invert the dead-time relation for the true rate (smaller root of the
            // quadratic). NOTE(review): the expression under the square root assumes
            // 4 * total * deadTime / length <= 1 - confirm for very high count rates.
            double timeRatio = deadTime / p.getLength();
            res = wind / total * (1d - Math.sqrt(1d - 4d * total * timeRatio)) / 2d / timeRatio;
        } else {
            res = wind;
        }
        return res;
    }

    /**
     * Statistical (Poisson) error estimate of the dead-time-corrected count rate.
     *
     * @param p        the point
     * @param from     the lower channel of the window
     * @param to       the upper channel of the window
     * @param deadTime the dead time in seconds
     * @return the count rate error estimate
     */
    public static double countRateWithDeadTimeErr(NumassPoint p, int from, int to, double deadTime) {
        return Math.sqrt(countRateWithDeadTime(p, from, to, deadTime) / p.getLength());
    }
}

View File

@ -1,4 +1,4 @@
package inr.numass.storage;
package inr.numass.data;
import hep.dataforge.tables.DataPoint;
import hep.dataforge.tables.MapPoint;

View File

@ -0,0 +1,53 @@
package inr.numass.data;
import org.jetbrains.annotations.NotNull;
import java.io.IOException;
import java.io.InputStream;
import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import java.util.stream.IntStream;
/**
* Created by darksnake on 13-Apr-17.
*/
/**
 * Static factories that build {@link NumassPoint} instances from raw events or
 * from protobuf-serialized point data.
 * <p>
 * Created by darksnake on 13-Apr-17.
 */
public class PointBuilders {

    /**
     * Read a point from a protobuf stream and build its amplitude spectrum.
     *
     * @param u           the set voltage of the point
     * @param startTime   the acquisition start time
     * @param pointLength the acquisition length
     * @param stream      stream containing a serialized {@code NumassProto.Point}
     * @param peakFinder  extracts the amplitude from a raw (waveform) event
     * @return the assembled point
     * @throws IOException if the protobuf message cannot be read
     */
    public static NumassPoint readProtoPoint(double u, Instant startTime, double pointLength, InputStream stream, Function<NumassProto.Point.Channel.Block.Event, Integer> peakFinder) throws IOException {
        NumassProto.Point point = NumassProto.Point.parseFrom(stream);
        // Only the first channel is read here - NOTE(review): confirm that
        // multi-channel points are not expected by the callers.
        NumassProto.Point.Channel ch = point.getChannels(0);
        int[] spectrum = count(ch.getBlocksList().stream()
                .flatMapToInt(block -> IntStream.concat(
                        // amplitudes already extracted by the acquisition system
                        block.getPeaks().getAmplitudesList()
                                .stream().mapToInt(Integer::intValue),
                        // raw events still need the peak extracted by the supplied finder
                        block.getEventsList().stream()
                                .mapToInt(peakFinder::apply)
                ))
        );
        return new NMPoint(u, startTime, pointLength, spectrum);
    }

    /**
     * Build the amplitude spectrum (channel histogram) of a raw point.
     */
    private static int[] calculateSpectrum(RawNMPoint point) {
        assert point.getEventsCount() > 0;
        return count(point.getEvents().stream().mapToInt(event -> event.getChanel()));
    }

    /**
     * Convert a raw point into an immutable {@link NumassPoint}.
     *
     * @param point the raw point to convert
     * @return the assembled point
     */
    @NotNull
    public static NumassPoint readRawPoint(@NotNull RawNMPoint point) {
        return new NMPoint(point.getUset(), point.getStartTime(), point.getLength(), calculateSpectrum(point));
    }

    /**
     * Histogram a stream of non-negative values: {@code result[i]} is the number
     * of occurrences of {@code i}. The result length is the largest value seen
     * plus one; an empty stream yields an empty array.
     */
    private static int[] count(IntStream stream) {
        List<AtomicInteger> bins = new ArrayList<>();
        stream.forEach(value -> {
            // grow the histogram lazily up to the largest value seen so far
            while (bins.size() <= value) {
                bins.add(new AtomicInteger(0));
            }
            bins.get(value).incrementAndGet();
        });
        return bins.stream().mapToInt(AtomicInteger::get).toArray();
    }
}

View File

@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.storage;
package inr.numass.data;
import hep.dataforge.description.ValueDef;
import hep.dataforge.meta.Meta;

View File

@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.storage;
package inr.numass.data;
import java.time.Instant;
import java.util.ArrayList;

View File

@ -3,7 +3,7 @@
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package inr.numass.storage;
package inr.numass.data;
import hep.dataforge.context.Context;
import hep.dataforge.context.Global;

View File

@ -6,6 +6,7 @@ import hep.dataforge.data.DataFilter;
import hep.dataforge.data.DataTree;
import hep.dataforge.meta.Meta;
import hep.dataforge.storage.commons.StorageUtils;
import inr.numass.data.NumassData;
/**
* Created by darksnake on 03-Feb-17.

View File

@ -26,6 +26,7 @@ import hep.dataforge.storage.api.Storage;
import hep.dataforge.storage.filestorage.FileEnvelope;
import hep.dataforge.storage.loaders.AbstractLoader;
import hep.dataforge.tables.Table;
import inr.numass.data.*;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.VFS;
import org.slf4j.LoggerFactory;
@ -44,7 +45,6 @@ import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static inr.numass.storage.RawNMPoint.MAX_EVENTS_PER_POINT;
import static org.apache.commons.vfs2.FileType.FOLDER;
/**
@ -218,7 +218,7 @@ public class NumassDataLoader extends AbstractLoader implements ObjectLoader<Env
//Check if the point is composite
boolean segmented = envelope.meta().getBoolean("split", false);
if (!segmented && events.size() > MAX_EVENTS_PER_POINT) {
if (!segmented && events.size() > RawNMPoint.MAX_EVENTS_PER_POINT) {
pointTime = events.get(events.size() - 1).getTime() - events.get(0).getTime();
}
@ -235,7 +235,7 @@ public class NumassDataLoader extends AbstractLoader implements ObjectLoader<Env
* @return
*/
public NumassPoint readPoint(Envelope envelope) {
return readPoint(envelope, NMPoint::new);
return readPoint(envelope, PointBuilders::readRawPoint);
}
private Map<String, Supplier<Envelope>> getItems() {

View File

@ -23,6 +23,8 @@ import hep.dataforge.meta.MetaBuilder;
import hep.dataforge.storage.filestorage.FilePointLoader;
import hep.dataforge.storage.filestorage.FileStorage;
import hep.dataforge.storage.filestorage.VFSUtils;
import inr.numass.data.NMFile;
import inr.numass.data.NumassData;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;

View File

@ -1,6 +1,6 @@
syntax = "proto3";
package inr.numass.storage;
package inr.numass.data;
message Point {
message Channel {

View File

@ -11,7 +11,7 @@ description = "Main numass project"
dependencies {
compile group: 'commons-cli', name: 'commons-cli', version: '1.+'
compile group: 'commons-io', name: 'commons-io', version: '2.+'
compile project(':numass-storage')
compile project(':numass-core')
compile "hep.dataforge:dataforge-minuit" //project(':dataforge-stat:dataforge-minuit')
compile "hep.dataforge:grind-terminal" //project(':dataforge-grind:grind-terminal')
}

View File

@ -10,7 +10,7 @@ import hep.dataforge.io.ColumnedDataWriter
import hep.dataforge.tables.ListTable
import hep.dataforge.tables.MapPoint
import hep.dataforge.tables.TableFormatBuilder
import inr.numass.storage.NumassData
import inr.numass.data.NumassData
NumassData.metaClass.findPoint{double u ->
delegate.getNMPoints().getWork { it.getVoltage() == u }.getMap(20, true)
@ -56,7 +56,7 @@ NumassData data2 = NMFile.readFile(new File("D:\\Work\\Numass\\transmission 2013
double[] points = [14500,15000,15500,16000,18100,18200,18300]
ColumnedDataWriter.writeDataSet(System.out, buildSet(data1,data2,points), "Detector spectrum substraction");
ColumnedDataWriter.writeTable(System.out, buildSet(data1,data2,points), "Detector spectrum substraction");

View File

@ -6,12 +6,11 @@
package inr.numass.scripts
import inr.numass.storage.NMFile
import inr.numass.storage.NumassData
import inr.numass.storage.NumassDataLoader
import hep.dataforge.grind.GrindMetaBuilder
import hep.dataforge.meta.Meta
import inr.numass.actions.FindBorderAction
import hep.dataforge.grind.GrindMetaBuilder
import inr.numass.data.NumassData
import inr.numass.storage.NumassDataLoader
File dataDir = new File("D:\\Work\\Numass\\data\\2016_04\\T2_data\\Fill_2_2\\set_6_e26d123e54010000")
if(!dataDir.exists()){

View File

@ -1,30 +0,0 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.scripts
import hep.dataforge.context.Context
import hep.dataforge.data.DataNode
import hep.dataforge.stat.fit.FitTaskResult
import inr.numass.Main
import inr.numass.Numass
//Main.main("-lc")
Context context = Numass.buildContext();
context.putValue("integralThreshold", 15d);
DataNode resultPack = Main.run(context, "-c","D:\\sterile-new\\loss2014-11\\d2_19_1.xml")
FitTaskResult result = resultPack.getData().get()
result.print(new PrintWriter(System.out))

View File

@ -12,4 +12,4 @@ import hep.dataforge.tables.Table
File file = new File("D:\\Work\\Numass\\sterile2016\\empty.dat" )
Table referenceTable = new ColumnedDataReader(file).toTable();
ColumnedDataWriter.writeDataSet(System.out, referenceTable,"")
ColumnedDataWriter.writeTable(System.out, referenceTable,"")

View File

@ -88,7 +88,7 @@ data = TritiumUtils.correctForDeadTime(data, new SpectrumDataAdapter(), 10e-9);
// allPars.setParValue("X", 0.4);
ColumnedDataWriter.writeDataSet(System.out,data,"--- DATA ---");
ColumnedDataWriter.writeTable(System.out,data,"--- DATA ---");
FitState state = new FitState(data, model, allPars);
//new PlotFitResultAction().eval(state);

View File

@ -8,9 +8,9 @@ package inr.numass.scripts
import hep.dataforge.grind.Grind
import hep.dataforge.tables.DataPoint
import inr.numass.storage.NMPoint
import inr.numass.data.NMPoint
import inr.numass.data.RawNMPoint
import inr.numass.storage.NumassDataLoader
import inr.numass.storage.RawNMPoint
import inr.numass.utils.NMEventGeneratorWithPulser
import inr.numass.utils.PileUpSimulator
import inr.numass.utils.TritiumUtils

View File

@ -9,8 +9,8 @@ package inr.numass.scripts
import hep.dataforge.io.ColumnedDataWriter
import hep.dataforge.storage.commons.StorageUtils
import hep.dataforge.tables.Table
import inr.numass.storage.NMPoint
import inr.numass.storage.NumassDataUtils
import inr.numass.data.NMPoint
import inr.numass.data.NumassDataUtils
import inr.numass.storage.NumassStorage
import inr.numass.utils.UnderflowCorrection
@ -85,5 +85,5 @@ printPoint(data, [14000d, 14500d, 15000d, 15500d, 16500d])
println()
Table t = new UnderflowCorrection().fitAllPoints(data, 400, 600, 3100, 20);
ColumnedDataWriter.writeDataSet(System.out, t, "underflow parameters")
ColumnedDataWriter.writeTable(System.out, t, "underflow parameters")

View File

@ -29,7 +29,7 @@ import hep.dataforge.plots.data.PlottableData;
import hep.dataforge.plots.data.PlottableXYFunction;
import hep.dataforge.stat.fit.FitState;
import hep.dataforge.stat.models.XYModel;
import hep.dataforge.tables.PointSource;
import hep.dataforge.tables.NavigablePointSource;
import hep.dataforge.tables.XYAdapter;
import java.util.function.Function;
@ -46,7 +46,7 @@ public class PlotFitResultAction extends OneToOneAction<FitState, FitState> {
@Override
protected FitState execute(Context context, String name, FitState input, Laminate metaData) {
PointSource data = input.getDataSet();
NavigablePointSource data = input.getDataSet();
if (!(input.getModel() instanceof XYModel)) {
getReport(context, name).reportError("The fit model should be instance of XYModel for this action. Action failed!");
return input;

View File

@ -20,9 +20,9 @@ import hep.dataforge.data.binary.Binary;
import hep.dataforge.io.BasicIOManager;
import hep.dataforge.meta.Meta;
import hep.dataforge.names.Name;
import inr.numass.data.NumassDataReader;
import inr.numass.data.NumassPawReader;
import inr.numass.storage.NumassDataReader;
import inr.numass.storage.RawNMFile;
import inr.numass.data.RawNMFile;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.output.TeeOutputStream;

View File

@ -80,7 +80,6 @@ public class NumassPlugin extends BasicPlugin {
ActionManager actions = context.pluginManager().getOrLoad(ActionManager.class);
actions.attach(context);
actions.register(SlicingAction.class);
actions.register(PrepareDataAction.class);
actions.register(ReadLegacyDataAction.class);
actions.register(MergeDataAction.class);

View File

@ -21,10 +21,10 @@ import hep.dataforge.description.TypedActionDef;
import hep.dataforge.description.ValueDef;
import hep.dataforge.exceptions.ContentException;
import hep.dataforge.meta.Laminate;
import inr.numass.data.RawNMFile;
import inr.numass.data.RawNMPoint;
import inr.numass.debunch.DebunchReport;
import inr.numass.debunch.FrameAnalizer;
import inr.numass.storage.RawNMFile;
import inr.numass.storage.RawNMPoint;
import java.io.PrintWriter;

View File

@ -25,9 +25,9 @@ import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.MapPoint;
import hep.dataforge.tables.Table;
import hep.dataforge.values.Value;
import inr.numass.storage.NMFile;
import inr.numass.storage.NumassData;
import inr.numass.storage.NumassPoint;
import inr.numass.data.NMFile;
import inr.numass.data.NumassData;
import inr.numass.data.NumassPoint;
import org.apache.commons.math3.analysis.UnivariateFunction;
import java.io.OutputStream;
@ -69,7 +69,7 @@ public class FindBorderAction extends OneToOneAction<NumassData, Table> {
OutputStream stream = buildActionOutput(context, name);
ColumnedDataWriter.writeDataSet(stream, bData, String.format("%s : lower = %d upper = %d", name, lowerBorder, upperBorder));
ColumnedDataWriter.writeTable(stream, bData, String.format("%s : lower = %d upper = %d", name, lowerBorder, upperBorder));
report(context, name, "File {} completed", source.getName());
return bData;

View File

@ -4,9 +4,9 @@ import hep.dataforge.actions.ManyToOneAction;
import hep.dataforge.context.Context;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.meta.Laminate;
import inr.numass.storage.NMPoint;
import inr.numass.storage.NumassData;
import inr.numass.storage.NumassPoint;
import inr.numass.data.NMPoint;
import inr.numass.data.NumassData;
import inr.numass.data.NumassPoint;
import java.util.Collection;
import java.util.Map;

View File

@ -61,7 +61,7 @@ public class MergeDataAction extends ManyToOneAction<Table, Table> {
@Override
protected void afterGroup(Context context, String groupName, Meta outputMeta, Table output) {
OutputStream stream = buildActionOutput(context, groupName);
ColumnedDataWriter.writeDataSet(stream, output, outputMeta.toString());
ColumnedDataWriter.writeTable(stream, output, outputMeta.toString());
}
// @Override
@ -135,7 +135,7 @@ public class MergeDataAction extends ManyToOneAction<Table, Table> {
private Table mergeDataSets(String name, Collection<Table> ds) {
//Сливаем все точки в один набор данных
Map<Double, List<DataPoint>> points = new LinkedHashMap<>();
for (PointSource d : ds) {
for (Table d : ds) {
if (!d.getFormat().names().contains(parnames)) {
throw new IllegalArgumentException();
}

View File

@ -124,7 +124,7 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
//
// if (!dataList.isEmpty()) {
// //Генерируем автоматический формат по первой строчке
// format = DataFormat.of(dataList.getRow(0));
// format = DataFormat.of(dataList.getPoint(0));
// } else {
// format = DataFormat.of(parnames);
// }
@ -132,7 +132,7 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
OutputStream stream = buildActionOutput(context, name);
ColumnedDataWriter.writeDataSet(stream, data, head);
ColumnedDataWriter.writeTable(stream, data, head);
return data;
}
@ -197,7 +197,7 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
String monitorFileName = meta.getString("monitorFile", "monitor");
OutputStream stream = buildActionOutput(context, monitorFileName);
ListTable data = new ListTable(monitorPoints);
ColumnedDataWriter.writeDataSet(stream, TableTransform.sort(data, "Timestamp", true), "Monitor points", monitorNames);
ColumnedDataWriter.writeTable(stream, TableTransform.sort(data, "Timestamp", true), "Monitor points", monitorNames);
}
}

View File

@ -26,9 +26,13 @@ import hep.dataforge.io.XMLMetaWriter;
import hep.dataforge.meta.Laminate;
import hep.dataforge.meta.Meta;
import hep.dataforge.tables.*;
import inr.numass.data.NumassData;
import inr.numass.data.NumassPoint;
import inr.numass.data.PointBuilders;
import inr.numass.data.RawNMPoint;
import inr.numass.debunch.DebunchReport;
import inr.numass.debunch.FrameAnalizer;
import inr.numass.storage.*;
import inr.numass.storage.NumassDataLoader;
import inr.numass.utils.ExpressionUtils;
import java.io.OutputStream;
@ -112,7 +116,7 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
long total = point.getTotalCount();
double uset = utransform.apply(point.getVoltage());
double uread = utransform.apply(point.getUread());
double uread = utransform.apply(point.getVoltage());
double time = point.getLength();
int a = getLowerBorder(meta, uset);
int b = Math.min(upper, RawNMPoint.MAX_CHANEL);
@ -163,7 +167,7 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
OutputStream stream = buildActionOutput(context, name);
ColumnedDataWriter.writeDataSet(stream, data, head);
ColumnedDataWriter.writeTable(stream, data, head);
// log.logString("File %s completed", dataFile.getName());
return data;
}
@ -201,9 +205,9 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
double cr = point.selectChanels(lower, upper).getCR();
if (cr < maxCR) {
DebunchReport report = new FrameAnalizer(rejectionprob, framelength, lower, upper).debunchPoint(point);
return new NMPoint(report.getPoint());
return PointBuilders.readRawPoint(report.getPoint());
} else {
return new NMPoint(point);
return PointBuilders.readRawPoint(point);
}
}

View File

@ -23,8 +23,8 @@ import hep.dataforge.description.TypedActionDef;
import hep.dataforge.description.ValueDef;
import hep.dataforge.exceptions.ContentException;
import hep.dataforge.meta.Laminate;
import inr.numass.storage.NMFile;
import inr.numass.storage.RawNMFile;
import inr.numass.data.NMFile;
import inr.numass.data.RawNMFile;
import static inr.numass.NumassIO.getNumassData;

View File

@ -18,8 +18,8 @@ import hep.dataforge.plots.data.PlottableData;
import hep.dataforge.plots.data.XYPlottable;
import hep.dataforge.tables.*;
import hep.dataforge.values.ValueType;
import inr.numass.storage.NumassData;
import inr.numass.storage.NumassPoint;
import inr.numass.data.NumassData;
import inr.numass.data.NumassPoint;
import java.io.OutputStream;
import java.util.*;
@ -79,7 +79,7 @@ public class ShowEnergySpectrumAction extends OneToOneAction<NumassData, Table>
OutputStream out = buildActionOutput(context, name);
Table table = builder.build();
ColumnedDataWriter.writeDataSet(out, table, inputMeta.toString());
ColumnedDataWriter.writeTable(out, table, inputMeta.toString());
if (inputMeta.hasMeta("plot") || inputMeta.getBoolean("plot", false)) {
PlotFrame frame = PlotUtils.getPlotManager(context)

View File

@ -1,275 +0,0 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.actions;
import hep.dataforge.actions.OneToOneAction;
import hep.dataforge.context.Context;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.io.ColumnedDataWriter;
import hep.dataforge.io.PrintFunction;
import hep.dataforge.maths.GridCalculator;
import hep.dataforge.maths.NamedMatrix;
import hep.dataforge.maths.integration.UnivariateIntegrator;
import hep.dataforge.meta.Laminate;
import hep.dataforge.meta.MetaBuilder;
import hep.dataforge.plots.PlotFrame;
import hep.dataforge.plots.PlotUtils;
import hep.dataforge.plots.data.PlottableXYFunction;
import hep.dataforge.stat.fit.FitState;
import hep.dataforge.stat.fit.FitTaskResult;
import hep.dataforge.stat.fit.Param;
import hep.dataforge.stat.fit.ParamSet;
import hep.dataforge.stat.simulation.GaussianParameterGenerator;
import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.MapPoint;
import hep.dataforge.tables.Table;
import hep.dataforge.values.NamedValueSet;
import inr.numass.models.ExperimentalVariableLossSpectrum;
import inr.numass.models.LossCalculator;
import inr.numass.utils.NumassIntegrator;
import org.apache.commons.math3.analysis.UnivariateFunction;
import org.apache.commons.math3.analysis.interpolation.LinearInterpolator;
import org.apache.commons.math3.analysis.interpolation.UnivariateInterpolator;
import org.apache.commons.math3.stat.StatUtils;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
import org.slf4j.LoggerFactory;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.nio.charset.Charset;
import java.util.Arrays;
/**
 * Post-fit reporting action for loss-spectrum fits: plots the differential
 * scattering cross-section, computes the ionisation ratio (and its standard
 * deviation via Monte-Carlo sampling of the fit covariance) and optionally
 * prints a textual summary.
 *
 * @author darksnake
 */
@TypedActionDef(name = "showLoss", inputType = FitState.class, outputType = FitState.class,
        info = "Show loss spectrum for fit with loss model. Calculate excitation to ionisation ratio.")
@Deprecated
public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {

    // Parameters a loss-spectrum fit must contain; execute() fails fast otherwise.
    private static final String[] names = {"X", "exPos", "ionPos", "exW", "ionW", "exIonRatio"};

    /**
     * Ionisation ratio for the given parameter set: one minus the integral of
     * the single-scatter loss function over [5, threshold].
     *
     * @param set       loss-model fit parameters
     * @param threshold upper integration limit (ionisation threshold; units presumably eV — confirm)
     */
    public static double calcultateIonRatio(NamedValueSet set, double threshold) {
        UnivariateIntegrator integrator = NumassIntegrator.getHighDensityIntegrator();
        UnivariateFunction integrand = LossCalculator.getSingleScatterFunction(set);
        return 1d - integrator.integrate(integrand, 5d, threshold);
    }

    /**
     * Monte-Carlo spread of the single-scatter function: draws parameter vectors
     * from a Gaussian with the given covariance and records, on a uniform grid
     * over [8, 32], the central curve, min/max envelope and dispersion.
     *
     * @param writer     NOTE(review): unused in this method — kept for call-site compatibility
     * @param name       NOTE(review): unused in this method — kept for call-site compatibility
     * @param parameters central parameter values
     * @param covariance parameter covariance matrix
     * @return table with columns e, central, lower, upper, dispersion
     */
    public static Table generateSpread(PrintWriter writer, String name, NamedValueSet parameters, NamedMatrix covariance) {
        int numCalls = 1000;
        int gridPoints = 200;
        double a = 8;
        double b = 32;
        double[] grid = GridCalculator.getUniformUnivariateGrid(a, b, gridPoints);
        double[] upper = new double[gridPoints];
        double[] lower = new double[gridPoints];
        double[] dispersion = new double[gridPoints];
        double[] central = new double[gridPoints];
        UnivariateFunction func = LossCalculator.getSingleScatterFunction(parameters);
        for (int j = 0; j < gridPoints; j++) {
            central[j] = func.value(grid[j]);
        }
        // Envelope starts inverted so the first sampled curve always updates it.
        Arrays.fill(upper, Double.NEGATIVE_INFINITY);
        Arrays.fill(lower, Double.POSITIVE_INFINITY);
        Arrays.fill(dispersion, 0);
        GaussianParameterGenerator generator = new GaussianParameterGenerator(parameters, covariance);
        for (int i = 0; i < numCalls; i++) {
            func = LossCalculator.getSingleScatterFunction(generator.generate());
            for (int j = 0; j < gridPoints; j++) {
                double val = func.value(grid[j]);
                upper[j] = Math.max(upper[j], val);
                lower[j] = Math.min(lower[j], val);
                // Accumulated mean squared deviation from the central curve.
                dispersion[j] += (val - central[j]) * (val - central[j]) / numCalls;
            }
        }
        String[] pointNames = {"e", "central", "lower", "upper", "dispersion"};
        ListTable.Builder res = new ListTable.Builder(pointNames);
        for (int i = 0; i < gridPoints; i++) {
            res.row(new MapPoint(pointNames, grid[i], central[i], lower[i], upper[i], dispersion[i]));
        }
        return res.build();
    }

    /**
     * Validates that the input fit contains loss-model parameters, plots the
     * cross-section, computes the ionisation ratio when the model supports it,
     * and (with {@code printResult}) writes a textual report. Returns the input
     * state unchanged.
     */
    @Override
    protected FitState execute(Context context, String name, FitState input, Laminate meta) {
        ParamSet pars = input.getParameters();
        if (!pars.names().contains(names)) {
            LoggerFactory.getLogger(getClass()).error("Wrong input FitState. Must be loss spectrum fit.");
            throw new RuntimeException("Wrong input FitState");
        }
        UnivariateFunction scatterFunction;
        boolean calculateRatio = false;
        PlotFrame frame = PlotUtils.getPlotManager(context)
                .buildPlotFrame(getName(), name + ".loss",
                        new MetaBuilder("plot")
                                .setValue("plotTitle", "Differential scattering crossection for " + name)
                );
        // Choose the cross-section representation based on which loss model was fitted.
        switch (input.getModel().meta().getString("name", "")) {
            case "scatter-variable":
                scatterFunction = LossCalculator.getSingleScatterFunction(pars);
                calculateRatio = true;
                LossCalculator.plotScatter(frame, pars);
                break;
            case "scatter-empiric-experimental":
                scatterFunction = new ExperimentalVariableLossSpectrum.Loss(0.3).total(pars);
                frame.add(PlottableXYFunction.plotFunction("Cross-section", (x) -> scatterFunction.value(x), 0, 100, 1000));
                break;
            default:
                throw new RuntimeException("Can work only with variable loss spectra");
        }
        double threshold = 0;
        double ionRatio = -1;
        double ionRatioError = -1;
        if (calculateRatio) {
            threshold = meta.getDouble("ionThreshold", 17);
            ionRatio = calcultateIonRatio(pars, threshold);
            report(context, name, "The ionization ratio (using threshold {}) is {}", threshold, ionRatio);
            ionRatioError = calultateIonRatioError(context, name, input, threshold);
            report(context, name, "the ionization ration standard deviation (using threshold {}) is {}", threshold, ionRatioError);
        }
        if (meta.getBoolean("printResult", false)) {
            PrintWriter writer = new PrintWriter(new OutputStreamWriter(buildActionOutput(context, name), Charset.forName("UTF-8")));
//            writer.println("*** FIT PARAMETERS ***");
            input.print(writer);
//            for (Param param : pars.getSubSet(names).getParams()) {
//                writer.println(param.toString());
//            }
//            writer.println();
//            onComplete.printf("Chi squared over degrees of freedom: %g/%d = %g", input.getChi2(), input.ndf(), chi2 / this.ndf());
            writer.println();
            writer.println("*** LOSS SPECTRUM INFORMATION ***");
            writer.println();
            if (calculateRatio) {
                writer.printf("The ionization ratio (using threshold %f) is %f%n", threshold, ionRatio);
                writer.printf("The ionization ratio standard deviation (using threshold %f) is %f%n", threshold, ionRatioError);
                writer.println();
            }
//            double integralThreshold = reader.getDouble("numass.eGun", 19005d) - reader.getDouble("integralThreshold", 14.82);
//            double integralRatio = calculateIntegralExIonRatio(input.getDataSet(), input.getParameters().getDouble("X"), integralThreshold);
//            writer.printf("The excitation to ionization ratio from integral spectrum (using threshold %f) is %f%n", integralThreshold, integralRatio);
            writer.println();
            writer.println("*** SUMMARY ***");
            // Tab-separated header row: name, each parameter with its error column, optional ratio columns, chi2.
            writer.printf("%s\t", "name");
            for (String parName : names) {
                writer.printf("%s\t%s\t", parName, parName + "_err");
            }
            if (calculateRatio) {
                writer.printf("%s\t", "ionRatio");
                writer.printf("%s\t", "ionRatioErr");
            }
            writer.printf("%s%n", "chi2");
            writer.printf("%s\t", name);
            for (Param param : pars.getSubSet(names).getParams()) {
                writer.printf("%f\t%f\t", param.value(), param.getErr());
            }
            if (calculateRatio) {
                writer.printf("%f\t", ionRatio);
                writer.printf("%f\t", ionRatioError);
            }
            writer.printf("%f%n", input.getChi2() / ((FitTaskResult) input).ndf());
            writer.println();
            writer.println("***LOSS SPECTRUM***");
            writer.println();
            PrintFunction.printFunctionSimple(writer, scatterFunction, 0, 100, 500);
            if (meta.getBoolean("showSpread", false)) {
                writer.println("***SPECTRUM SPREAD***");
                writer.println();
                ParamSet parameters = input.getParameters().getSubSet("exPos", "ionPos", "exW", "ionW", "exIonRatio");
                NamedMatrix covariance = input.getCovariance();
                Table spreadData = generateSpread(writer, name, parameters, covariance);
                ColumnedDataWriter.writeDataSet(System.out, spreadData, "", spreadData.getFormat().namesAsArray());
            }
        }
        return input;
    }

    // NOTE(review): private and not referenced anywhere in this class (only in
    // commented-out code above) — candidate for removal.
    private double calculateIntegralExIonRatio(Table data, double X, double integralThreshold) {
        double scatterProb = 1 - Math.exp(-X);
        double[] x = data.getColumn("Uset").asList().stream().mapToDouble((val) -> val.doubleValue()).toArray();
        double[] y = data.getColumn("CR").asList().stream().mapToDouble((val) -> val.doubleValue()).toArray();
        double yMax = StatUtils.max(y);
        UnivariateInterpolator interpolator = new LinearInterpolator();
        UnivariateFunction interpolated = interpolator.interpolate(x, y);
        double thresholdValue = interpolated.value(integralThreshold);
        double one = 1 - X * Math.exp(-X);
        double ionProb = (one - thresholdValue / yMax);
        double exProb = (thresholdValue / yMax - one + scatterProb);
        return exProb / ionProb;
    }

    /**
     * Convenience overload: extracts the loss-parameter subset and covariance
     * from the fit state and delegates to the Monte-Carlo estimator below.
     */
    public double calultateIonRatioError(Context context, String dataNeme, FitState state, double threshold) {
        ParamSet parameters = state.getParameters().getSubSet("exPos", "ionPos", "exW", "ionW", "exIonRatio");
        NamedMatrix covariance = state.getCovariance();
        return calultateIonRatioError(context, dataNeme, parameters, covariance, threshold);
    }

    /**
     * Standard deviation of the ionisation ratio, estimated by sampling 10000
     * Gaussian parameter vectors and discarding NaN outcomes.
     */
    // NOTE(review): "Unchecked" is not a recognized @SuppressWarnings value
    // (should be lowercase "unchecked"), so this annotation has no effect.
    @SuppressWarnings("Unchecked")
    public double calultateIonRatioError(Context context, String name, NamedValueSet parameters, NamedMatrix covariance, double threshold) {
        int number = 10000;
        double[] res = new GaussianParameterGenerator(parameters, covariance)
                .generate(number)
                .stream()
                .mapToDouble((vector) -> calcultateIonRatio(vector, threshold))
                .filter(d -> !Double.isNaN(d))
                .toArray();
//        Histogram hist = new Histogram(0.3, 0.5, 0.002);
//        hist.fill(res);
//        PlotFrame frame = PlotUtils.getPlotManager(context)
//                .buildPlotFrame(getName(), name + ".ionRatio",
//                        new MetaBuilder("plot").setValue("plotTitle", "Ion ratio Distribution for " + name)
//                );
//        frame.add(PlottableData.plot("ionRatio", new XYAdapter("binCenter", "count"), hist));
        return new DescriptiveStatistics(res).getStandardDeviation();
    }
}

View File

@ -1,90 +0,0 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.actions;
import hep.dataforge.tables.MapPoint;
import hep.dataforge.tables.SimplePointSource;
import hep.dataforge.tables.TableFormat;
import hep.dataforge.values.Value;
import inr.numass.storage.NMFile;
import inr.numass.storage.NumassPoint;
import org.apache.commons.math3.util.Pair;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
/**
 * A table view of an NMFile in which every named channel window ("slice")
 * becomes a column, alongside the set voltage and the acquisition time.
 *
 * @author Darksnake
 */
public class SlicedData extends SimplePointSource {
    //format = {U,username1,username2, ...}
    private static final String UNAME = "U";
    private static final String TNAME = "Time";

    /**
     * @param file      source file whose points are tabulated
     * @param intervals named channel windows; map iteration order defines column order
     * @param normalize when true, slice counts are divided by the point length (count rate)
     */
    public SlicedData(NMFile file, Map<String, Pair<Integer, Integer>> intervals, boolean normalize) {
        super(buildFormat(intervals));
        populate(file, intervals, normalize);
    }

    // Column layout: U, Time, then one column per named interval.
    private static TableFormat buildFormat(Map<String, Pair<Integer, Integer>> intervals) {
        ArrayList<String> columns = new ArrayList<>(intervals.keySet());
        columns.add(0, TNAME);
        columns.add(0, UNAME);
        return TableFormat.forNames(columns);
    }

    private void populate(NMFile file, Map<String, Pair<Integer, Integer>> intervals, boolean normalize) {
        for (NumassPoint point : file) {
            // Base for the future row.
            HashMap<String, Value> row = new HashMap<>();
            // Store the set voltage.
            row.put(UNAME, Value.of(point.getVoltage()));
            double duration = point.getLength();
            row.put(TNAME, Value.of(duration));
            for (Map.Entry<String, Pair<Integer, Integer>> entry : intervals.entrySet()) {
                Pair<Integer, Integer> border = entry.getValue();
                int lo = border.getFirst();
                int hi = border.getSecond();
                // Check the border order and flip it when reversed.
                int count;
                if (hi > lo) {
                    count = point.getCountInWindow(lo, hi);
                } else if (hi < lo) {
                    count = point.getCountInWindow(hi, lo);
                } else {
                    count = point.getCount(lo);
                }
                // Store either the raw count or the count rate.
                row.put(entry.getKey(), normalize ? Value.of(count / duration) : Value.of(count));
            }
            this.addRow(new MapPoint(row));
        }
    }
}

View File

@ -1,81 +0,0 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.actions;
import hep.dataforge.actions.OneToOneAction;
import hep.dataforge.context.Context;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.exceptions.ContentException;
import hep.dataforge.io.ColumnedDataWriter;
import hep.dataforge.meta.Laminate;
import hep.dataforge.meta.Meta;
import inr.numass.storage.NMFile;
import inr.numass.storage.RawNMPoint;
import org.apache.commons.math3.util.Pair;
import java.io.OutputStream;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
 * Action that slices every point of an NMFile into named channel windows and
 * writes the resulting table. The slice layout is read from the action meta:
 * {@code sliceconfig -> slicepoint*(title, from, to)}. Returns the source file
 * unchanged.
 *
 * @author Darksnake
 */
@TypedActionDef(name = "slicing", inputType = NMFile.class, outputType = NMFile.class)
public class SlicingAction extends OneToOneAction<NMFile, NMFile> {

    public static final String name = "slicing";

    @Override
    public String getName() {
        return name;
    }

    @Override
    protected NMFile execute(Context context, String name, NMFile source, Laminate meta) throws ContentException {
        // Build the slice map directly; LinkedHashMap preserves declaration order of slicepoints.
        // (The original also null-checked this map, but it was assigned from a fresh
        // LinkedHashMap and could never be null — that dead branch is removed.)
        Map<String, Pair<Integer, Integer>> slicingConfig = new LinkedHashMap<>();
        for (Meta slice : meta.getMeta("sliceconfig").getMetaList("slicepoint")) {
            String title = slice.getString("title", slice.getName());
            int from = slice.getInt("from", 0);
            int to = slice.getInt("to", RawNMPoint.MAX_CHANEL);
            slicingConfig.put(title, new Pair<>(from, to));
        }
        boolean normalize = meta.getBoolean("normalize", false);
        report(context, name, "File {} started", source.getName());
        SlicedData sData = new SlicedData(source, slicingConfig, normalize);
        OutputStream stream = buildActionOutput(context, name);
        ColumnedDataWriter.writeDataSet(stream, sData, null);
        report(context, name, "File {} completed", source.getName());
        return source;
    }
}

View File

@ -49,7 +49,7 @@ public class SubstractSpectrumAction extends OneToOneAction<Table, Table> {
Table res = builder.build();
OutputStream stream = buildActionOutput(context, name);
ColumnedDataWriter.writeDataSet(stream, res, inputMeta.toString());
ColumnedDataWriter.writeTable(stream, res, inputMeta.toString());
return res;
} catch (IOException ex) {
throw new RuntimeException("Could not read reference file", ex);

View File

@ -113,7 +113,7 @@ public class SummaryAction extends ManyToOneAction<FitState, Table> {
@Override
protected void afterGroup(Context context, String groupName, Meta outputMeta, Table output) {
OutputStream stream = buildActionOutput(context, groupName);
ColumnedDataWriter.writeDataSet(stream, output, groupName);
ColumnedDataWriter.writeTable(stream, output, groupName);
super.afterGroup(context, groupName, outputMeta, output);
}

View File

@ -1,84 +0,0 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.data;
import hep.dataforge.io.ColumnedDataWriter;
import hep.dataforge.tables.MapPoint;
import hep.dataforge.tables.SimplePointSource;
import hep.dataforge.tables.TableFormat;
import hep.dataforge.tables.TableFormatBuilder;
import hep.dataforge.values.Value;
import hep.dataforge.values.ValueType;
import inr.numass.storage.NumassPoint;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * A table of amplitude spectra: one row per channel bin, one numeric column per
 * point (columns labelled with the point read voltage, formatted "%.3f").
 *
 * @author Darksnake
 */
public class ESpectrum extends SimplePointSource {

    private final static String binCenter = "chanel";
    // Channel bin width used when extracting each point's spectrum map.
    int binning = 1;

    /**
     * @param points    points to tabulate; must be non-empty (asserted in fill)
     * @param binning   channel bin width passed to NumassPoint.getMap
     * @param normalize normalization flag, delegated to NumassPoint.getMap
     */
    public ESpectrum(List<NumassPoint> points, int binning, boolean normalize) {
        super(prepareFormat(points));
        this.binning = binning;
        fill(points, normalize);
    }

    // One string column for the bin center, then a numeric column per point.
    // NOTE(review): bare `format` assumes a static import of String.format that
    // is not visible in this excerpt — confirm against the full file.
    private static TableFormat prepareFormat(List<NumassPoint> points) {
        TableFormatBuilder builder = new TableFormatBuilder();
        builder.addString(binCenter);
        points.stream().forEach((point) -> {
            builder.addColumn(format("%.3f", point.getUread()), 10, ValueType.NUMBER);
        });
        return builder.build();
    }

    private void fill(List<NumassPoint> points, boolean normalize) {
        assert !points.isEmpty();
        List<Map<Double, Double>> spectra = new ArrayList<>();
        for (NumassPoint numassPoint : points) {
            spectra.add(numassPoint.getMap(binning, normalize));
        }
        // Row keys come from the first point's map; presumably all points share
        // the same bin grid — TODO confirm (missing keys would store null values).
        for (Double x : spectra.get(0).keySet()) {
            Map<String, Value> res = new HashMap<>();
            res.put(binCenter, Value.of(x));
            for (int j = 0; j < points.size(); j++) {
                res.put(format("%.3f", points.get(j).getUread()), Value.of(spectra.get(j).get(x)));
            }
            this.addRow(new MapPoint(res));
        }
    }

    /** Write this spectrum table to the given stream. */
    public void printToFile(OutputStream stream) {
        ColumnedDataWriter.writeDataSet(stream, this, null);
//        new ColumnedDataWriter(stream, this.getFormat().asArray()).writeDataSet(this, null);
    }
}

View File

@ -1,42 +0,0 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.data;
import static java.lang.Math.max;
/**
* инструменты для работы с энергитическим спектром (который в каналах)
* @author Darksnake
*/
public class ESpectrumUtils {
public static int[] substract(int[] sp1, int[] sp2) {
return substract(sp1, sp2, 0, sp1.length);
}
public static int[] substract(int[] sp1, int[] sp2, int from, int to) {
assert sp1.length == sp2.length;
assert to >= from;
assert to <= sp1.length;
int[] res = new int[sp1.length];
for (int i = from; i < to; i++) {
res[i] = max(0, sp1[i]-sp2[i]);
}
return res;
}
}

View File

@ -15,8 +15,8 @@
*/
package inr.numass.data;
import inr.numass.storage.RawNMFile;
import hep.dataforge.data.binary.Binary;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;

View File

@ -15,17 +15,17 @@
*/
package inr.numass.data;
import hep.dataforge.stat.parametric.ParametricFunction;
import static hep.dataforge.maths.MatrixOperations.inverse;
import hep.dataforge.maths.NamedMatrix;
import hep.dataforge.stat.parametric.ParametricFunction;
import hep.dataforge.tables.DataPoint;
import hep.dataforge.tables.ListTable;
import hep.dataforge.values.NamedValueSet;
import inr.numass.utils.TritiumUtils;
import org.apache.commons.math3.analysis.UnivariateFunction;
import org.apache.commons.math3.linear.Array2DRowRealMatrix;
import org.apache.commons.math3.linear.RealMatrix;
import static hep.dataforge.maths.MatrixOperations.inverse;
/**
*
* @author Darksnake
@ -57,7 +57,7 @@ public class SpectrumInformation {
* @return
*/
public NamedMatrix getInformationMatrix(NamedValueSet set, ListTable data, String... parNames) {
SpectrumDataAdapter reader = TritiumUtils.adapter();
SpectrumDataAdapter reader = NumassDataUtils.adapter();
String[] names = parNames;
if (names.length == 0) {

View File

@ -15,8 +15,9 @@
*/
package inr.numass.debunch;
import inr.numass.storage.NMEvent;
import inr.numass.storage.RawNMPoint;
import inr.numass.data.NMEvent;
import inr.numass.data.RawNMPoint;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;

View File

@ -15,7 +15,7 @@
*/
package inr.numass.debunch;
import inr.numass.storage.NMEvent;
import inr.numass.data.NMEvent;
/**
*

View File

@ -15,8 +15,9 @@
*/
package inr.numass.debunch;
import inr.numass.storage.NMEvent;
import inr.numass.storage.RawNMPoint;
import inr.numass.data.NMEvent;
import inr.numass.data.RawNMPoint;
import java.util.List;
/**

View File

@ -15,8 +15,9 @@
*/
package inr.numass.debunch;
import inr.numass.storage.NMEvent;
import inr.numass.storage.RawNMPoint;
import inr.numass.data.NMEvent;
import inr.numass.data.RawNMPoint;
import java.util.ArrayList;
import java.util.List;

View File

@ -15,7 +15,7 @@
*/
package inr.numass.debunch;
import inr.numass.storage.RawNMPoint;
import inr.numass.data.RawNMPoint;
/**
*

View File

@ -15,10 +15,11 @@
*/
package inr.numass.debunch;
import inr.numass.storage.NMEvent;
import inr.numass.data.NMEvent;
import org.apache.commons.math3.distribution.PoissonDistribution;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.math3.distribution.PoissonDistribution;
/**
*

View File

@ -15,7 +15,7 @@
*/
package inr.numass.debunch;
import inr.numass.storage.RawNMPoint;
import inr.numass.data.RawNMPoint;
import org.apache.commons.math3.analysis.UnivariateFunction;
import org.apache.commons.math3.analysis.interpolation.LinearInterpolator;
import org.apache.commons.math3.util.FastMath;

View File

@ -85,7 +85,7 @@ public class NumassFitScanSummaryTask extends AbstractTask<Table> {
try (OutputStream stream = buildActionOutput(context, nodeName)) {
String head = "Sterile neutrino mass scan summary\n" + meta.toString();
ColumnedDataWriter.writeDataSet(stream, res, head);
ColumnedDataWriter.writeTable(stream, res, head);
} catch (IOException e) {
getLogger(meta).error("Failed to close output stream", e);
}

View File

@ -19,7 +19,7 @@ import hep.dataforge.workspace.TaskModel;
import inr.numass.actions.MergeDataAction;
import inr.numass.actions.MonitorCorrectAction;
import inr.numass.actions.PrepareDataAction;
import inr.numass.storage.NumassData;
import inr.numass.data.NumassData;
/**
* Prepare data task

View File

@ -53,7 +53,7 @@ public class NumassSubstractEmptySourceTask extends AbstractTask<Table> {
res.getGoal().onComplete((r, err) -> {
if (r != null) {
OutputStream out = model.getContext().io().out("merge", input.getName() + ".subtract");
ColumnedDataWriter.writeDataSet(out, r,
ColumnedDataWriter.writeTable(out, r,
input.meta().getBuilder().setNode("empty", emptySource.meta()).toString());
}
});

View File

@ -15,13 +15,14 @@
*/
package inr.numass.utils;
import inr.numass.storage.NMEvent;
import inr.numass.storage.RawNMPoint;
import java.util.ArrayList;
import inr.numass.data.NMEvent;
import inr.numass.data.RawNMPoint;
import org.apache.commons.math3.random.MersenneTwister;
import org.apache.commons.math3.random.RandomGenerator;
import org.apache.commons.math3.random.SynchronizedRandomGenerator;
import java.util.ArrayList;
/**
*
* @author Darksnake

View File

@ -16,9 +16,9 @@
package inr.numass.utils;
import hep.dataforge.meta.Meta;
import inr.numass.storage.NMEvent;
import inr.numass.storage.NumassPoint;
import inr.numass.storage.RawNMPoint;
import inr.numass.data.NMEvent;
import inr.numass.data.NumassPoint;
import inr.numass.data.RawNMPoint;
import org.apache.commons.math3.distribution.EnumeratedRealDistribution;
import org.apache.commons.math3.distribution.RealDistribution;
import org.apache.commons.math3.random.EmpiricalDistribution;

View File

@ -1,7 +1,7 @@
package inr.numass.utils;
import hep.dataforge.meta.Meta;
import inr.numass.storage.NMEvent;
import inr.numass.data.NMEvent;
import org.apache.commons.math3.distribution.NormalDistribution;
import org.apache.commons.math3.distribution.RealDistribution;
import org.apache.commons.math3.random.RandomGenerator;

View File

@ -5,10 +5,10 @@
*/
package inr.numass.utils;
import inr.numass.storage.NMEvent;
import inr.numass.storage.NMPoint;
import inr.numass.storage.NumassPoint;
import inr.numass.storage.RawNMPoint;
import inr.numass.data.NMEvent;
import inr.numass.data.NumassPoint;
import inr.numass.data.PointBuilders;
import inr.numass.data.RawNMPoint;
import org.apache.commons.math3.random.RandomGenerator;
import java.util.ArrayList;
@ -55,15 +55,15 @@ public class PileUpSimulator {
}
public NumassPoint generated() {
return new NMPoint(new RawNMPoint(uSet, generated, pointLength));
return PointBuilders.readRawPoint(new RawNMPoint(uSet, generated, pointLength));
}
public NumassPoint registered() {
return new NMPoint(new RawNMPoint(uSet, registered, pointLength));
return PointBuilders.readRawPoint(new RawNMPoint(uSet, registered, pointLength));
}
public NumassPoint pileup() {
return new NMPoint(new RawNMPoint(uSet, pileup, pointLength));
return PointBuilders.readRawPoint(new RawNMPoint(uSet, pileup, pointLength));
}
/**

View File

@ -15,11 +15,7 @@
*/
package inr.numass.utils;
import hep.dataforge.tables.DataPoint;
import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.Table;
import inr.numass.data.SpectrumDataAdapter;
import inr.numass.storage.NumassPoint;
import inr.numass.data.NumassPoint;
import org.apache.commons.math3.analysis.UnivariateFunction;
import java.util.HashMap;
@ -32,48 +28,6 @@ import static java.lang.Math.*;
*/
public class TritiumUtils {
public static Table correctForDeadTime(ListTable data, double dtime) {
return correctForDeadTime(data, adapter(), dtime);
}
/**
* Коррекция на мертвое время в секундах
*
* @param data
* @param dtime
* @return
*/
public static Table correctForDeadTime(ListTable data, SpectrumDataAdapter adapter, double dtime) {
// SpectrumDataAdapter adapter = adapter();
ListTable.Builder res = new ListTable.Builder(data.getFormat());
for (DataPoint dp : data) {
double corrFactor = 1 / (1 - dtime * adapter.getCount(dp) / adapter.getTime(dp));
res.row(adapter.buildSpectrumDataPoint(adapter.getX(dp).doubleValue(), (long) (adapter.getCount(dp) * corrFactor), adapter.getTime(dp)));
}
return res.build();
}
/**
* Поправка масштаба высокого.
*
* @param data
* @param beta
* @return
*/
public static Table setHVScale(ListTable data, double beta) {
SpectrumDataAdapter reader = adapter();
ListTable.Builder res = new ListTable.Builder(data.getFormat());
for (DataPoint dp : data) {
double corrFactor = 1 + beta;
res.row(reader.buildSpectrumDataPoint(reader.getX(dp).doubleValue() * corrFactor, reader.getCount(dp), reader.getTime(dp)));
}
return res.build();
}
public static SpectrumDataAdapter adapter() {
return new SpectrumDataAdapter("Uset", "CR", "CRerr", "Time");
}
/**
* Integral beta spectrum background with given amplitude (total count rate
* from)
@ -107,24 +61,6 @@ public class TritiumUtils {
return res * 1E-23;
}
public static double countRateWithDeadTime(NumassPoint p, int from, int to, double deadTime) {
double wind = p.getCountInWindow(from, to) / p.getLength();
double res;
if (deadTime > 0) {
double total = p.getTotalCount();
// double time = p.getLength();
// res = wind / (1 - total * deadTime / time);
double timeRatio = deadTime / p.getLength();
res = wind / total * (1d - Math.sqrt(1d - 4d * total * timeRatio)) / 2d / timeRatio;
} else {
res = wind;
}
return res;
}
public static double countRateWithDeadTimeErr(NumassPoint p, int from, int to, double deadTime) {
return Math.sqrt(countRateWithDeadTime(p, from, to, deadTime) / p.getLength());
}
/**
* Evaluate groovy expression using numass point as parameter
@ -136,7 +72,7 @@ public class TritiumUtils {
public static double pointExpression(String expression, NumassPoint point) {
Map<String, Object> exprParams = new HashMap<>();
exprParams.put("T", point.getLength());
exprParams.put("U", point.getUread());
exprParams.put("U", point.getVoltage());
exprParams.put("cr", ((double) point.getTotalCount()) / point.getLength());
exprParams.put("point", point);
return ExpressionUtils.function(expression, exprParams);

View File

@ -9,7 +9,7 @@ import hep.dataforge.tables.DataPoint;
import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.MapPoint;
import hep.dataforge.tables.Table;
import inr.numass.storage.NumassPoint;
import inr.numass.data.NumassPoint;
import org.apache.commons.math3.analysis.ParametricUnivariateFunction;
import org.apache.commons.math3.exception.DimensionMismatchException;
import org.apache.commons.math3.fitting.SimpleCurveFitter;

View File

@ -17,7 +17,7 @@ apply plugin: 'com.github.johnrengelman.shadow'
mainClassName = "inr.numass.server.ServerRunner"
dependencies {
compile project(':numass-storage')
compile project(':numass-core')
compile "hep.dataforge:storage-servlet" // project(':dataforge-storage:storage-servlet')
compile 'commons-daemon:commons-daemon:1.+'
}

View File

@ -12,7 +12,7 @@ import hep.dataforge.storage.api.PointLoader;
import hep.dataforge.storage.api.Storage;
import hep.dataforge.storage.servlet.ServletUtils;
import hep.dataforge.storage.servlet.StorageRatpackHandler;
import inr.numass.storage.NumassData;
import inr.numass.data.NumassData;
import org.slf4j.LoggerFactory;
import ratpack.handling.Context;

View File

@ -1,69 +0,0 @@
package inr.numass.storage;
import java.time.Instant;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * Created by darksnake on 30-Jan-17.
 */
public class NumassDataUtils {

    /**
     * Merge several data sets into one spectrum, summing points that share the
     * same set voltage. Insertion order of first appearance is preserved.
     */
    public static Collection<NumassPoint> joinSpectra(Stream<NumassData> spectra) {
        Map<Double, NumassPoint> accumulator = new LinkedHashMap<>();
        spectra.forEach(datum ->
                datum.forEach(point ->
                        accumulator.merge(point.getVoltage(), point,
                                (existing, incoming) -> join(incoming, existing))
                )
        );
        return accumulator.values();
    }

    /**
     * Spectral sum of two points
     *
     * @param first
     * @param second
     * @return
     */
    public static NumassPoint join(NumassPoint first, NumassPoint second) {
        if (first.getVoltage() != second.getVoltage()) {
            throw new RuntimeException("Voltage mismatch");
        }
        // Channel-wise sum; lengths (acquisition times) are added as well.
        int[] summed = new int[first.getSpectrum().length];
        Arrays.setAll(summed, i -> first.getSpectrum()[i] + second.getSpectrum()[i]);
        return new NMPoint(
                first.getVoltage(),
                Instant.EPOCH,
                first.getLength() + second.getLength(),
                summed
        );
    }

    /**
     * Channel-wise difference of a point and a reference point, clamped at zero.
     * The resulting point keeps the original voltage, start time and length.
     */
    public static NumassPoint substractPoint(NumassPoint point, NumassPoint reference) {
        int[] clamped = new int[point.getSpectrum().length];
        Arrays.setAll(clamped, i -> Math.max(0, point.getSpectrum()[i] - reference.getSpectrum()[i]));
        return new NMPoint(
                point.getVoltage(),
                point.getStartTime(),
                point.getLength(),
                clamped
        );
    }

    /**
     * Subtract the point at set voltage {@code uset} from every point in the
     * collection; throws when no point matches that voltage.
     */
    public static Collection<NumassPoint> substractReferencePoint(Collection<NumassPoint> points, double uset) {
        NumassPoint reference = points.stream()
                .filter(it -> it.getVoltage() == uset)
                .findFirst()
                .orElseThrow(() -> new RuntimeException("Reference point not found"));
        return points.stream().map(it -> substractPoint(it, reference)).collect(Collectors.toList());
    }
}

View File

@ -1,22 +0,0 @@
package inr.numass.storage;
/**
 * Placeholder for helpers that read Numass points from the protobuf format.
 * The only method is still a commented-out draft.
 * <p>
 * Created by darksnake on 13-Apr-17.
 */
public class ProtoUtils {
    // NOTE(review): the original file left an unbalanced extra '}' after the
    // commented-out draft below, which made the class fail to compile; the
    // stray brace has been removed. The draft itself is kept for reference.
//    public static NumassPoint readProtoPoint(InputStream stream, Function<Channel.Block.Event, Long> peakFinder) throws IOException {
//        Point point = NumassProto.Point.parseFrom(stream);
//        Channel ch = point.getChannels(0);
//        ch.getBlocksList().stream()
//                .flatMapToLong(block -> {
//                    return LongStream.concat(
//                            block.getPeaks().getAmplitudesList()
//                                    .stream().mapToLong(it -> it.longValue()),
//                            block.getEventsList().stream()
//                                    .mapToLong(it->)
//                    )
//                });
//
//        return new NMPoint();
//    }
}

View File

@ -1,5 +1,8 @@
apply plugin: 'groovy'
description = "Test module for numass client and server"
task runServer(type: JavaExec) {
description 'Start numass server locally'
@ -23,7 +26,7 @@ task runClient(type: JavaExec) {
}
dependencies {
compile project(':numass-storage:numass-client')
compile project(':numass-storage:numass-server')
compile project(':numass-client')
compile project(':numass-server')
compile "hep.dataforge:dataforge-grind" //project(':dataforge-grind')
}
}

Some files were not shown because too many files have changed in this diff Show More