Spectrum merger

This commit is contained in:
darksnake 2017-01-30 16:59:46 +03:00
parent b97ac722ed
commit 530bc869d3
14 changed files with 142 additions and 91 deletions

View File

@ -7,20 +7,29 @@
package inr.numass.scripts
import hep.dataforge.io.ColumnedDataWriter
import hep.dataforge.storage.commons.StorageUtils
import hep.dataforge.tables.Table
import inr.numass.storage.NumassData
import inr.numass.storage.NumassDataLoader
import inr.numass.storage.NMPoint
import inr.numass.storage.NumassDataUtils
import inr.numass.storage.NumassStorage
import inr.numass.utils.UnderflowCorrection
//File dataDir = new File("D:\\Work\\Numass\\data\\2016_04\\T2_data\\Fill_2_2\\set_7_b2a3433e54010000")
//File dataDir = new File("D:\\Work\\Numass\\data\\2016_04\\T2_data\\Fill_2_2\\set_6_e26d123e54010000")
//File dataDir = new File("D:\\Work\\Numass\\data\\2016_10\\Fill_1\\set_28")
File dataDir = new File("D:\\Work\\Numass\\data\\2016_10\\Fill_2_wide\\set_31")
if(!dataDir.exists()){
println "dataDir directory does not exist"
}
NumassData data = NumassDataLoader.fromLocalDir(null, dataDir)
//NumassData data = NMFile.readFile(new File("D:\\Work\\Numass\\sterilie2013-2014\\dat\\2013\\SCAN06.DAT" ))
//File dataDir = new File("D:\\Work\\Numass\\data\\2016_10\\Fill_2_wide\\set_31")
File rootDir = new File("D:\\Work\\Numass\\data\\2016_10\\Fill_2_wide")
NumassStorage storage = NumassStorage.buildLocalNumassRoot(rootDir, true);
Iterable<NMPoint> data = NumassDataUtils.sumSpectra(
StorageUtils.loaderStream(storage).map { it.value }.filter { it.name.matches("set_.{2,3}") }
)
//if(!dataDir.exists()){
// println "dataDir directory does not exist"
//}
//NumassData data = NumassDataLoader.fromLocalDir(null, dataDir)
////NumassData data = NMFile.readFile(new File("D:\\Work\\Numass\\sterilie2013-2014\\dat\\2013\\SCAN06.DAT" ))
Table t = new UnderflowCorrection().fitAllPoints(data, 400, 650, 3100, 20);
ColumnedDataWriter.writeDataSet(System.out, t, "underflow parameters")

View File

@ -49,8 +49,7 @@ public class DebunchAction extends OneToOneAction<RawNMFile, RawNMFile> {
double framelength = meta.getDouble("framelength", 5);
double maxCR = meta.getDouble("maxcr", 100d);
RawNMFile res = new RawNMFile(source.getName());
res.setHead(source.getHead());
RawNMFile res = new RawNMFile(source.getName(), source.getHead());
source.getData().stream().map((point) -> {
double cr = point.selectChanels(lower, upper).getCR();
if (cr < maxCR) {

View File

@ -86,7 +86,7 @@ public class FindBorderAction extends OneToOneAction<NumassData, Table> {
}
private void fill(ListTable.Builder dataBuilder, NumassData file, int lower, int upper, NMPoint reference) {
for (NMPoint point : file.getNMPoints()) {
for (NMPoint point : file) {
if ((reference != null) && (point.getUset() == reference.getUset())) {
continue;
}

View File

@ -47,7 +47,7 @@ public class MergeDataAction extends ManyToOneAction<Table, Table> {
if (meta.hasValue("grouping.byValue")) {
groups = super.buildGroups(context, input, actionMeta);
} else {
groups = GroupBuilder.byValue(MERGE_NAME, meta.getString(MERGE_NAME, "merge")).group(input);
groups = GroupBuilder.byValue(MERGE_NAME, meta.getString(MERGE_NAME, input.getName())).group(input);
}
return groups;
}

View File

@ -100,7 +100,7 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
}
List<DataPoint> dataList = new ArrayList<>();
for (NMPoint point : dataFile.getNMPoints()) {
for (NMPoint point : dataFile) {
long total = point.getEventsCount();
double uset = utransform.apply(point.getUset());

View File

@ -36,23 +36,21 @@ public class SlicedData extends SimplePointSource {
//format = {U,username1,username2, ...}
private static final String UNAME = "U";
private static TableFormat prepateFormat(Map<String,Pair<Integer,Integer>> intervals){
ArrayList<String> names = new ArrayList<>(intervals.keySet());
names.add(0, TNAME);
names.add(0, UNAME);
return TableFormat.forNames(names);
}
public SlicedData(NMFile file, Map<String,Pair<Integer,Integer>> intervals, boolean normalize) {
public SlicedData(NMFile file, Map<String, Pair<Integer, Integer>> intervals, boolean normalize) {
super(prepateFormat(intervals));
fill(file, intervals, normalize);
}
private static TableFormat prepateFormat(Map<String,Pair<Integer,Integer>> intervals){
ArrayList<String> names = new ArrayList<>(intervals.keySet());
names.add(0, TNAME);
names.add(0, UNAME);
return TableFormat.forNames(names);
}
private void fill(NMFile file, Map<String,Pair<Integer,Integer>> intervals, boolean normalize){
for (NMPoint point : file.getNMPoints()) {
for (NMPoint point : file) {
//создаем основу для будущей точки
HashMap<String,Value> map = new HashMap<>();

View File

@ -10,7 +10,6 @@ import hep.dataforge.meta.Meta;
import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.Table;
import inr.numass.storage.NMPoint;
import inr.numass.storage.NumassData;
import inr.numass.storage.RawNMPoint;
import org.apache.commons.math3.analysis.ParametricUnivariateFunction;
import org.apache.commons.math3.exception.DimensionMismatchException;
@ -51,18 +50,18 @@ public class UnderflowCorrection {
}
}
public Table fitAllPoints(NumassData data, int xLow, int xHigh, int binning) {
public Table fitAllPoints(Iterable<NMPoint> data, int xLow, int xHigh, int binning) {
ListTable.Builder builder = new ListTable.Builder("U", "amp", "expConst");
for (NMPoint point : data.getNMPoints()) {
for (NMPoint point : data) {
double[] fitRes = getUnderflowExpParameters(point, xLow, xHigh, binning);
builder.row(point.getUset(), fitRes[0], fitRes[1]);
}
return builder.build();
}
public Table fitAllPoints(NumassData data, int xLow, int xHigh, int upper, int binning) {
public Table fitAllPoints(Iterable<NMPoint> data, int xLow, int xHigh, int upper, int binning) {
ListTable.Builder builder = new ListTable.Builder("U", "amp", "expConst", "correction");
for (NMPoint point : data.getNMPoints()) {
for (NMPoint point : data) {
double norm = ((double) point.getCountInWindow(xLow, upper))/point.getLength();
double[] fitRes = getUnderflowExpParameters(point, xLow, xHigh, binning);
builder.row(point.getUset(), fitRes[0], fitRes[1], fitRes[0] * fitRes[1] * (Math.exp(xLow / fitRes[1]) - 1d) / norm + 1d);

View File

@ -17,17 +17,17 @@ package inr.numass.storage;
import hep.dataforge.description.ValueDef;
import hep.dataforge.meta.Meta;
import hep.dataforge.meta.MetaBuilder;
import hep.dataforge.names.NamedMetaHolder;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Stream;
/**
*
* Объект, содержащий только спектры, но не сами события
*
* @author Darksnake
@ -35,51 +35,33 @@ import java.util.List;
@ValueDef(name = "numass.path", info = "Path to this data file in numass repository.")
@ValueDef(name = "numass.name", info = "The name of this data file.")
public class NMFile extends NamedMetaHolder implements NumassData {
public static NMFile readStream(InputStream is, String fname, Meta config) throws IOException{
return new NMFile(new NumassDataReader(is, fname, config).read());
}
public static NMFile readFile(File file) throws IOException{
return new NMFile(new NumassDataReader(file).read());
}
private final String head;
private final List<NMPoint> points;
public NMFile(RawNMFile file) {
super(file.getName(), file.meta());
this.head = file.getHead();
points = new ArrayList<>();
for (RawNMPoint point : file.getData()) {
points.add(new NMPoint(point));
}
}
public static NMFile readStream(InputStream is, String fname, Meta config) throws IOException {
return new NMFile(new NumassDataReader(is, fname, config).read());
}
public static NMFile readFile(File file) throws IOException {
return new NMFile(new NumassDataReader(file).read());
}
@Override
public String getDescription() {
return "";
}
/**
* @return the head
*/
public String getHead() {
return head;
}
@Override
public Meta meta() {
return new MetaBuilder("info").setValue("info", head);
}
/**
* @return the points
*/
@Override
public List<NMPoint> getNMPoints() {
return points;
public Stream<NMPoint> stream() {
return points.stream();
}
@Override

View File

@ -11,21 +11,32 @@ import hep.dataforge.names.Named;
import hep.dataforge.tables.Table;
import java.time.Instant;
import java.util.Iterator;
import java.util.List;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
*
* @author <a href="mailto:altavir@gmail.com">Alexander Nozik</a>
*/
public interface NumassData extends Named, Annotated {
public interface NumassData extends Named, Annotated, Iterable<NMPoint> {
String getDescription();
@Override
Meta meta();
List<NMPoint> getNMPoints();
Stream<NMPoint> stream();
@Override
default Iterator<NMPoint> iterator() {
return stream().iterator();
}
default List<NMPoint> getNMPoints() {
return stream().collect(Collectors.toList());
}
boolean isEmpty();
@ -42,7 +53,7 @@ public interface NumassData extends Named, Annotated {
* @return
*/
default NMPoint getByUset(double U) {
for (NMPoint point : getNMPoints()) {
for (NMPoint point : this) {
if (point.getUset() == U) {
return point;
}
@ -57,7 +68,7 @@ public interface NumassData extends Named, Annotated {
* @return
*/
default NMPoint getByUread(double U) {
for (NMPoint point : getNMPoints()) {
for (NMPoint point : this) {
if (point.getUread() == U) {
return point;
}

View File

@ -44,6 +44,7 @@ import java.util.*;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static inr.numass.storage.RawNMPoint.MAX_EVENTS_PER_POINT;
import static org.apache.commons.vfs2.FileType.FOLDER;
@ -303,8 +304,8 @@ public class NumassDataLoader extends AbstractLoader implements ObjectLoader<Env
}
@Override
public List<NMPoint> getNMPoints() {
return this.getPoints().stream().parallel().map(this::readPoint).collect(Collectors.toList());
public Stream<NMPoint> stream() {
return this.getPoints().stream().parallel().map(this::readPoint);
}
public List<NMPoint> getNMPoints(Function<RawNMPoint, NMPoint> transformation) {
@ -397,8 +398,8 @@ public class NumassDataLoader extends AbstractLoader implements ObjectLoader<Env
}
@Override
public List<NMPoint> getNMPoints() {
return NumassDataLoader.this.getNMPoints(transform);
public Stream<NMPoint> stream() {
return NumassDataLoader.this.stream();
}
@Override

View File

@ -127,11 +127,8 @@ public class NumassDataReader {
}
private RawNMFile readFile(String name) throws IOException {
RawNMFile file = new RawNMFile(name);
String head = readHead();//2048
file.setHead(head.replaceAll("\u0000", ""));
RawNMFile file = new RawNMFile(name, head.replaceAll("\u0000", ""));
LocalDateTime filedate = readDate(head);
int lab = readByte();

View File

@ -0,0 +1,43 @@
package inr.numass.storage;
import java.time.Instant;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.Stream;
/**
* Created by darksnake on 30-Jan-17.
*/
public class NumassDataUtils {

    /**
     * Sums several Numass spectra point-by-point, grouping points by their set
     * voltage (Uset). Points that share the same Uset are combined with
     * {@link #join(NMPoint, NMPoint)}, so the result contains exactly one
     * merged point per distinct voltage.
     *
     * @param spectra stream of data sets to sum; the stream is fully consumed
     * @return merged points, one per distinct Uset (iteration order unspecified,
     *         since they come from a {@code HashMap})
     */
    public static Iterable<NMPoint> sumSpectra(Stream<NumassData> spectra) {
        Map<Double, NMPoint> map = new HashMap<>();
        // Map.merge replaces the containsKey/get/put dance: insert the point when
        // its voltage is new, otherwise fold it into the accumulated point.
        // Argument order matches the original join(newPoint, accumulated) call.
        spectra.forEach(datum ->
                datum.forEach(point ->
                        map.merge(point.getUset(), point, (acc, incoming) -> join(incoming, acc))
                )
        );
        return map.values();
    }

    /**
     * Combines two points taken at the same set voltage: channel spectra and
     * acquisition lengths are summed, Uset/Uread are taken from {@code first},
     * and the timestamp is reset to {@link Instant#EPOCH} because the merged
     * point no longer corresponds to a single acquisition time.
     *
     * @throws RuntimeException if the set voltages of the two points differ
     */
    private static NMPoint join(NMPoint first, NMPoint second) {
        if (first.getUset() != second.getUset()) {
            throw new RuntimeException("Voltage mismatch");
        }
        int[] newArray = new int[first.getSpectrum().length];
        Arrays.setAll(newArray, i -> first.getSpectrum()[i] + second.getSpectrum()[i]);
        return new NMPoint(
                first.getUset(),
                first.getUread(),
                Instant.EPOCH,
                first.getLength() + second.getLength(),
                newArray
        );
    }
}

View File

@ -186,6 +186,7 @@ public class NumassStorage extends FileStorage {
*/
@SuppressWarnings("unchecked")
public void pushNumassData(String fileName, ByteBuffer data) throws StorageException {
//FIXME move zip to internal
try {
FileObject nmFile = getDataDir().resolveFile(fileName + NUMASS_ZIP_EXTENSION);
if (!nmFile.exists()) {

View File

@ -15,7 +15,11 @@
*/
package inr.numass.storage;
import hep.dataforge.description.ValueDef;
import hep.dataforge.meta.Meta;
import hep.dataforge.meta.MetaBuilder;
import hep.dataforge.names.NamedMetaHolder;
import java.io.BufferedOutputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
@ -24,29 +28,34 @@ import java.util.List;
/**
* Contains the whole data but requires a lot of memory
*
* @author Darksnake
*/
@ValueDef(name = "info", info = "file text header")
public class RawNMFile extends NamedMetaHolder {
// public static String TYPE = ":data:numassdatafile";
private final List<RawNMPoint> points;
private String head;
private final List<RawNMPoint> points = new ArrayList<>();
public void setHead(String head) {
this.head = head;
}
public String getHead() {
return head;
}
public RawNMFile(String fileName) {
super(fileName);
this.points = new ArrayList<>();
}
public RawNMFile(String name, Meta meta) {
super(name, meta);
}
public RawNMFile(String name, String header) {
super(name, new MetaBuilder("meta").setValue("info", header));
}
public String getHead() {
return meta().getString("info", "");
}
@Deprecated
public void generatePAW(OutputStream stream) {
PrintWriter writer = new PrintWriter(new BufferedOutputStream(stream));
long counter = 0;
@ -63,8 +72,9 @@ public class RawNMFile extends NamedMetaHolder {
/**
* merge of all point with given Uset
*
* @param U
* @return
* @return
*/
public RawNMPoint getByUset(double U) {
RawNMPoint res = null;
@ -80,18 +90,19 @@ public class RawNMFile extends NamedMetaHolder {
}
return res;
}
/**
* merge of all point with given Uread
*
* @param U
* @return
* @return
*/
public RawNMPoint getByUread(double U) {
RawNMPoint res = null;
for (RawNMPoint point : points) {
if (point.getUread()== U) {
if (point.getUread() == U) {
if (res == null) {
res = point.clone();
} else {
@ -100,7 +111,7 @@ public class RawNMFile extends NamedMetaHolder {
}
}
return res;
}
}
/**
* @return the data