Spectrum merger

darksnake 2017-01-30 16:59:46 +03:00
parent b97ac722ed
commit 530bc869d3
14 changed files with 142 additions and 91 deletions

View File

@@ -7,20 +7,29 @@
 package inr.numass.scripts

 import hep.dataforge.io.ColumnedDataWriter
+import hep.dataforge.storage.commons.StorageUtils
 import hep.dataforge.tables.Table
-import inr.numass.storage.NumassData
-import inr.numass.storage.NumassDataLoader
+import inr.numass.storage.NMPoint
+import inr.numass.storage.NumassDataUtils
+import inr.numass.storage.NumassStorage
 import inr.numass.utils.UnderflowCorrection

-//File dataDir = new File("D:\\Work\\Numass\\data\\2016_04\\T2_data\\Fill_2_2\\set_7_b2a3433e54010000")
-//File dataDir = new File("D:\\Work\\Numass\\data\\2016_04\\T2_data\\Fill_2_2\\set_6_e26d123e54010000")
 //File dataDir = new File("D:\\Work\\Numass\\data\\2016_10\\Fill_1\\set_28")
-File dataDir = new File("D:\\Work\\Numass\\data\\2016_10\\Fill_2_wide\\set_31")
-if(!dataDir.exists()){
-    println "dataDir directory does not exist"
-}
-NumassData data = NumassDataLoader.fromLocalDir(null, dataDir)
-//NumassData data = NMFile.readFile(new File("D:\\Work\\Numass\\sterilie2013-2014\\dat\\2013\\SCAN06.DAT" ))
+//File dataDir = new File("D:\\Work\\Numass\\data\\2016_10\\Fill_2_wide\\set_31")
+
+File rootDir = new File("D:\\Work\\Numass\\data\\2016_10\\Fill_2_wide")
+
+NumassStorage storage = NumassStorage.buildLocalNumassRoot(rootDir, true);
+
+Iterable<NMPoint> data = NumassDataUtils.sumSpectra(
+        StorageUtils.loaderStream(storage).map { it.value }.filter { it.name.matches("set_.{2,3}") }
+)
+
+//if(!dataDir.exists()){
+//    println "dataDir directory does not exist"
+//}
+//NumassData data = NumassDataLoader.fromLocalDir(null, dataDir)
+////NumassData data = NMFile.readFile(new File("D:\\Work\\Numass\\sterilie2013-2014\\dat\\2013\\SCAN06.DAT" ))

 Table t = new UnderflowCorrection().fitAllPoints(data, 400, 650, 3100, 20);
 ColumnedDataWriter.writeDataSet(System.out, t, "underflow parameters")

View File

@@ -49,8 +49,7 @@ public class DebunchAction extends OneToOneAction<RawNMFile, RawNMFile> {
         double framelength = meta.getDouble("framelength", 5);
         double maxCR = meta.getDouble("maxcr", 100d);

-        RawNMFile res = new RawNMFile(source.getName());
-        res.setHead(source.getHead());
+        RawNMFile res = new RawNMFile(source.getName(), source.getHead());
         source.getData().stream().map((point) -> {
             double cr = point.selectChanels(lower, upper).getCR();
             if (cr < maxCR) {

View File

@@ -86,7 +86,7 @@ public class FindBorderAction extends OneToOneAction<NumassData, Table> {
     }

     private void fill(ListTable.Builder dataBuilder, NumassData file, int lower, int upper, NMPoint reference) {
-        for (NMPoint point : file.getNMPoints()) {
+        for (NMPoint point : file) {
             if ((reference != null) && (point.getUset() == reference.getUset())) {
                 continue;
             }

View File

@@ -47,7 +47,7 @@ public class MergeDataAction extends ManyToOneAction<Table, Table> {
         if (meta.hasValue("grouping.byValue")) {
             groups = super.buildGroups(context, input, actionMeta);
         } else {
-            groups = GroupBuilder.byValue(MERGE_NAME, meta.getString(MERGE_NAME, "merge")).group(input);
+            groups = GroupBuilder.byValue(MERGE_NAME, meta.getString(MERGE_NAME, input.getName())).group(input);
         }
         return groups;
     }

View File

@@ -100,7 +100,7 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
         }

         List<DataPoint> dataList = new ArrayList<>();
-        for (NMPoint point : dataFile.getNMPoints()) {
+        for (NMPoint point : dataFile) {
             long total = point.getEventsCount();
             double uset = utransform.apply(point.getUset());

View File

@@ -37,6 +37,11 @@ public class SlicedData extends SimplePointSource {
     private static final String UNAME = "U";

+    public SlicedData(NMFile file, Map<String, Pair<Integer, Integer>> intervals, boolean normalize) {
+        super(prepateFormat(intervals));
+        fill(file, intervals, normalize);
+    }
+
     private static TableFormat prepateFormat(Map<String,Pair<Integer,Integer>> intervals){
         ArrayList<String> names = new ArrayList<>(intervals.keySet());
         names.add(0, TNAME);
@@ -44,15 +49,8 @@ public class SlicedData extends SimplePointSource {
         return TableFormat.forNames(names);
     }

-    public SlicedData(NMFile file, Map<String,Pair<Integer,Integer>> intervals, boolean normalize) {
-        super(prepateFormat(intervals));
-        fill(file, intervals, normalize);
-    }
-
     private void fill(NMFile file, Map<String,Pair<Integer,Integer>> intervals, boolean normalize){
-        for (NMPoint point : file.getNMPoints()) {
+        for (NMPoint point : file) {
             //create the base for the future point
             HashMap<String,Value> map = new HashMap<>();

View File

@@ -10,7 +10,6 @@ import hep.dataforge.meta.Meta;
 import hep.dataforge.tables.ListTable;
 import hep.dataforge.tables.Table;
 import inr.numass.storage.NMPoint;
-import inr.numass.storage.NumassData;
 import inr.numass.storage.RawNMPoint;
 import org.apache.commons.math3.analysis.ParametricUnivariateFunction;
 import org.apache.commons.math3.exception.DimensionMismatchException;
@@ -51,18 +50,18 @@ public class UnderflowCorrection {
         }
     }

-    public Table fitAllPoints(NumassData data, int xLow, int xHigh, int binning) {
+    public Table fitAllPoints(Iterable<NMPoint> data, int xLow, int xHigh, int binning) {
         ListTable.Builder builder = new ListTable.Builder("U", "amp", "expConst");
-        for (NMPoint point : data.getNMPoints()) {
+        for (NMPoint point : data) {
             double[] fitRes = getUnderflowExpParameters(point, xLow, xHigh, binning);
             builder.row(point.getUset(), fitRes[0], fitRes[1]);
         }
         return builder.build();
     }

-    public Table fitAllPoints(NumassData data, int xLow, int xHigh, int upper, int binning) {
+    public Table fitAllPoints(Iterable<NMPoint> data, int xLow, int xHigh, int upper, int binning) {
         ListTable.Builder builder = new ListTable.Builder("U", "amp", "expConst", "correction");
-        for (NMPoint point : data.getNMPoints()) {
+        for (NMPoint point : data) {
             double norm = ((double) point.getCountInWindow(xLow, upper))/point.getLength();
             double[] fitRes = getUnderflowExpParameters(point, xLow, xHigh, binning);
             builder.row(point.getUset(), fitRes[0], fitRes[1], fitRes[0] * fitRes[1] * (Math.exp(xLow / fitRes[1]) - 1d) / norm + 1d);

View File

@@ -17,17 +17,17 @@ package inr.numass.storage;
 import hep.dataforge.description.ValueDef;
 import hep.dataforge.meta.Meta;
-import hep.dataforge.meta.MetaBuilder;
 import hep.dataforge.names.NamedMetaHolder;

 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.time.Instant;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.stream.Stream;

 /**
- *
  * An object that contains only the spectra, not the events themselves
  *
  * @author Darksnake
@@ -36,50 +36,32 @@ import java.util.List;
 @ValueDef(name = "numass.name", info = "The name of this data file.")
 public class NMFile extends NamedMetaHolder implements NumassData {

-    public static NMFile readStream(InputStream is, String fname, Meta config) throws IOException{
-        return new NMFile(new NumassDataReader(is, fname, config).read());
-    }
-
-    public static NMFile readFile(File file) throws IOException{
-        return new NMFile(new NumassDataReader(file).read());
-    }
-
-    private final String head;
     private final List<NMPoint> points;

     public NMFile(RawNMFile file) {
         super(file.getName(), file.meta());
-        this.head = file.getHead();
         points = new ArrayList<>();
         for (RawNMPoint point : file.getData()) {
             points.add(new NMPoint(point));
         }
     }

+    public static NMFile readStream(InputStream is, String fname, Meta config) throws IOException {
+        return new NMFile(new NumassDataReader(is, fname, config).read());
+    }
+
+    public static NMFile readFile(File file) throws IOException {
+        return new NMFile(new NumassDataReader(file).read());
+    }
+
     @Override
     public String getDescription() {
         return "";
     }

-    /**
-     * @return the head
-     */
-    public String getHead() {
-        return head;
-    }
-
     @Override
-    public Meta meta() {
-        return new MetaBuilder("info").setValue("info", head);
-    }
-
-    /**
-     * @return the points
-     */
-    @Override
-    public List<NMPoint> getNMPoints() {
-        return points;
+    public Stream<NMPoint> stream() {
+        return points.stream();
     }

     @Override

View File

@@ -11,21 +11,32 @@ import hep.dataforge.names.Named;
 import hep.dataforge.tables.Table;

 import java.time.Instant;
+import java.util.Iterator;
 import java.util.List;
 import java.util.function.Supplier;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;

 /**
- *
  * @author <a href="mailto:altavir@gmail.com">Alexander Nozik</a>
  */
-public interface NumassData extends Named, Annotated {
+public interface NumassData extends Named, Annotated, Iterable<NMPoint> {

     String getDescription();

     @Override
     Meta meta();

-    List<NMPoint> getNMPoints();
+    Stream<NMPoint> stream();
+
+    @Override
+    default Iterator<NMPoint> iterator() {
+        return stream().iterator();
+    }
+
+    default List<NMPoint> getNMPoints() {
+        return stream().collect(Collectors.toList());
+    }

     boolean isEmpty();
@@ -42,7 +53,7 @@ public interface NumassData extends Named, Annotated {
      * @return
      */
     default NMPoint getByUset(double U) {
-        for (NMPoint point : getNMPoints()) {
+        for (NMPoint point : this) {
             if (point.getUset() == U) {
                 return point;
             }
@@ -57,7 +68,7 @@ public interface NumassData extends Named, Annotated {
      * @return
      */
     default NMPoint getByUread(double U) {
-        for (NMPoint point : getNMPoints()) {
+        for (NMPoint point : this) {
             if (point.getUread() == U) {
                 return point;
             }
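
For context, a minimal sketch (not part of this commit) of how calling code might use the reworked NumassData API above. The wrapper class and method names are illustrative; getEventsCount() and getUset() are the NMPoint accessors already used elsewhere in this diff.

import inr.numass.storage.NMPoint;
import inr.numass.storage.NumassData;

public class NumassDataUsageSketch {

    // NumassData now exposes stream(), so aggregates can be computed
    // without materializing the full point list first.
    public static long totalEvents(NumassData data) {
        return data.stream().mapToLong(NMPoint::getEventsCount).sum();
    }

    // Because NumassData is now Iterable<NMPoint>, the enhanced for-loop works directly.
    public static void printVoltages(NumassData data) {
        for (NMPoint point : data) {
            System.out.println(point.getUset());
        }
    }
}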

View File

@@ -44,6 +44,7 @@ import java.util.*;
 import java.util.function.Function;
 import java.util.function.Supplier;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;

 import static inr.numass.storage.RawNMPoint.MAX_EVENTS_PER_POINT;
 import static org.apache.commons.vfs2.FileType.FOLDER;
@@ -303,8 +304,8 @@ public class NumassDataLoader extends AbstractLoader implements ObjectLoader<Env
     }

     @Override
-    public List<NMPoint> getNMPoints() {
-        return this.getPoints().stream().parallel().map(this::readPoint).collect(Collectors.toList());
+    public Stream<NMPoint> stream() {
+        return this.getPoints().stream().parallel().map(this::readPoint);
     }

     public List<NMPoint> getNMPoints(Function<RawNMPoint, NMPoint> transformation) {
@@ -397,8 +398,8 @@ public class NumassDataLoader extends AbstractLoader implements ObjectLoader<Env
     }

     @Override
-    public List<NMPoint> getNMPoints() {
-        return NumassDataLoader.this.getNMPoints(transform);
+    public Stream<NMPoint> stream() {
+        return NumassDataLoader.this.stream();
     }

     @Override
@Override @Override

View File

@@ -127,11 +127,8 @@ public class NumassDataReader {
     }

     private RawNMFile readFile(String name) throws IOException {
-        RawNMFile file = new RawNMFile(name);
         String head = readHead();//2048
-        file.setHead(head.replaceAll("\u0000", ""));
+        RawNMFile file = new RawNMFile(name, head.replaceAll("\u0000", ""));
         LocalDateTime filedate = readDate(head);

         int lab = readByte();

View File

@@ -0,0 +1,43 @@
+package inr.numass.storage;
+
+import java.time.Instant;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.stream.Stream;
+
+/**
+ * Created by darksnake on 30-Jan-17.
+ */
+public class NumassDataUtils {
+
+    public static Iterable<NMPoint> sumSpectra(Stream<NumassData> spectra) {
+        Map<Double, NMPoint> map = new HashMap<>();
+        spectra.forEach(datum -> {
+            datum.forEach(point -> {
+                double uset = point.getUset();
+                if (map.containsKey(uset)) {
+                    map.put(uset, join(point, map.get(uset)));
+                } else {
+                    map.put(uset, point);
+                }
+            });
+        });
+        return map.values();
+    }
+
+    private static NMPoint join(NMPoint first, NMPoint second) {
+        if (first.getUset() != second.getUset()) {
+            throw new RuntimeException("Voltage mismatch");
+        }
+        int[] newArray = new int[first.getSpectrum().length];
+        Arrays.setAll(newArray, i -> first.getSpectrum()[i] + second.getSpectrum()[i]);
+        return new NMPoint(
+                first.getUset(),
+                first.getUread(),
+                Instant.EPOCH,
+                first.getLength() + second.getLength(),
+                newArray
+        );
+    }
+}
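
The new NumassDataUtils.sumSpectra groups points from several data sets by their Uset value and, via join(), adds the channel spectra and acquisition lengths of matching points (the merged point is stamped with Instant.EPOCH). A minimal caller sketch, mirroring the Groovy script at the top of this commit; the wrapper class and method here are illustrative only, not part of the change.

import hep.dataforge.tables.Table;
import inr.numass.storage.NMPoint;
import inr.numass.storage.NumassData;
import inr.numass.storage.NumassDataUtils;
import inr.numass.utils.UnderflowCorrection;

import java.util.stream.Stream;

public class SumSpectraSketch {

    // Merge several already-loaded data sets and fit underflow parameters
    // on the summed spectrum, as the updated script does for a whole storage.
    public static Table underflowFor(NumassData... sets) {
        Iterable<NMPoint> merged = NumassDataUtils.sumSpectra(Stream.of(sets));
        return new UnderflowCorrection().fitAllPoints(merged, 400, 650, 3100, 20);
    }
}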

View File

@@ -186,6 +186,7 @@ public class NumassStorage extends FileStorage {
      */
     @SuppressWarnings("unchecked")
     public void pushNumassData(String fileName, ByteBuffer data) throws StorageException {
+        //FIXME move zip to internal
         try {
             FileObject nmFile = getDataDir().resolveFile(fileName + NUMASS_ZIP_EXTENSION);
             if (!nmFile.exists()) {

View File

@@ -15,7 +15,11 @@
  */
 package inr.numass.storage;

+import hep.dataforge.description.ValueDef;
+import hep.dataforge.meta.Meta;
+import hep.dataforge.meta.MetaBuilder;
 import hep.dataforge.names.NamedMetaHolder;
+
 import java.io.BufferedOutputStream;
 import java.io.OutputStream;
 import java.io.PrintWriter;
@@ -24,29 +28,34 @@ import java.util.List;
 /**
  * Contains the whole data but requires a lot of memory
+ *
  * @author Darksnake
  */
+@ValueDef(name = "info", info = "file text header")
 public class RawNMFile extends NamedMetaHolder {
 // public static String TYPE = ":data:numassdatafile";

-    private final List<RawNMPoint> points;
-    private String head;
-
-    public void setHead(String head) {
-        this.head = head;
-    }
-
-    public String getHead() {
-        return head;
-    }
+    private final List<RawNMPoint> points = new ArrayList<>();

     public RawNMFile(String fileName) {
         super(fileName);
-        this.points = new ArrayList<>();
     }

+    public RawNMFile(String name, Meta meta) {
+        super(name, meta);
+    }
+
+    public RawNMFile(String name, String header) {
+        super(name, new MetaBuilder("meta").setValue("info", header));
+    }
+
+    public String getHead() {
+        return meta().getString("info", "");
+    }
+
+    @Deprecated
     public void generatePAW(OutputStream stream) {
         PrintWriter writer = new PrintWriter(new BufferedOutputStream(stream));
         long counter = 0;
@@ -63,6 +72,7 @@ public class RawNMFile extends NamedMetaHolder {
     /**
      * merge of all point with given Uset
+     *
      * @param U
      * @return
      */
@@ -84,6 +94,7 @@ public class RawNMFile extends NamedMetaHolder {
     /**
      * merge of all point with given Uread
+     *
      * @param U
      * @return
      */
@@ -91,7 +102,7 @@ public class RawNMFile extends NamedMetaHolder {
         RawNMPoint res = null;
         for (RawNMPoint point : points) {
-            if (point.getUread()== U) {
+            if (point.getUread() == U) {
                 if (res == null) {
                     res = point.clone();
                 } else {
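
With RawNMFile now carrying its text header in the meta node instead of a mutable field, construction and header access reduce to the sketch below (not part of this commit; the name and header string are placeholders).

import inr.numass.storage.RawNMFile;

public class RawNMFileHeaderSketch {
    public static void main(String[] args) {
        // The header goes into meta under the "info" key via the new constructor.
        RawNMFile file = new RawNMFile("example", "raw text header");
        System.out.println(file.getHead());                    // prints the header
        System.out.println(file.meta().getString("info", "")); // same value, read from meta
    }
}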