Grind DSL refinement + task & workspace fixes

Alexander Nozik 2016-08-16 18:56:51 +03:00
parent e1d0c50745
commit 867ace2d79
8 changed files with 119 additions and 61 deletions

View File

@@ -0,0 +1,41 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass
import groovy.transform.CompileStatic
import hep.dataforge.grind.WorkspaceSpec
import inr.numass.tasks.NumassFitScanSummaryTask
import inr.numass.tasks.NumassFitScanTask
import inr.numass.tasks.NumassPrepareTask
import inr.numass.tasks.NumassTableFilterTask
/**
* Created by darksnake on 16-Aug-16.
*/
@CompileStatic
class NumassWorkspaceSpec extends WorkspaceSpec {
NumassWorkspaceSpec() {
//load tasks
super.loadTask(NumassPrepareTask)
super.loadTask(NumassTableFilterTask)
super.loadTask(NumassFitScanTask)
super.loadTask(NumassFitScanSummaryTask)
}
}
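
With the tasks registered, the spec can be handed to Grind's Java entry point. A minimal usage sketch, mirroring the launcher change later in this commit (the workspace path is illustrative):

import hep.dataforge.grind.JavaGrindLauncher;
import java.io.File;

// build a workspace from a Groovy DSL file using this spec,
// then run a named task against a target and wait for all results
JavaGrindLauncher.buildWorkspace(new File("workspace.groovy"), NumassWorkspaceSpec.class)
        .runTask("numass.fitsum", "fill_3")
        .computeAll();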

View File

@@ -11,5 +11,5 @@ import hep.dataforge.io.ColumnedDataWriter
import hep.dataforge.tables.Table
File file = new File("D:\\Work\\Numass\\sterile2016\\empty.dat")
Table referenceTable = new ColumnedDataReader(file).toDataSet();
Table referenceTable = new ColumnedDataReader(file).toTable();
ColumnedDataWriter.writeDataSet(System.out, referenceTable, "")

View File

@@ -31,7 +31,7 @@ public class SubstractSpectrumAction extends OneToOneAction<Table, Table> {
try {
String referencePath = inputMeta.getString("file", "empty.dat");
File referenceFile = getContext().io().getFile(referencePath);
Table referenceTable = new ColumnedDataReader(referenceFile).toDataSet();
Table referenceTable = new ColumnedDataReader(referenceFile).toTable();
ListTable.Builder builder = new ListTable.Builder(input.getFormat());
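// rebuild each point of the input table, subtracting the reference spectrum from it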
input.stream().forEach(point -> {
MapPoint.Builder pointBuilder = new MapPoint.Builder(point);

View File

@@ -246,9 +246,11 @@ public class LossCalculator {
return cache.get(order);
} else {
synchronized (this) {
LoggerFactory.getLogger(getClass())
.debug("Scatter cache of order {} not found. Updating", order);
cache.putIfAbsent(order, getNextLoss(getMargin(order), getLoss(order - 1)));
cache.computeIfAbsent(order, (i) -> {
LoggerFactory.getLogger(getClass())
.debug("Scatter cache of order {} not found. Updating", i);
return getNextLoss(getMargin(i), getLoss(i - 1));
});
return cache.get(order);
}
}
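
The switch to computeIfAbsent fixes a subtle waste in the double-checked path: putIfAbsent evaluates its value argument before the call, so getNextLoss(...) could be recomputed even when a competing thread had already filled the slot, whereas computeIfAbsent runs the mapping function only on a genuine miss. A minimal sketch of the idiom, with a stand-in computation (names are illustrative):

import java.util.HashMap;
import java.util.Map;

class ScatterCache {
    private final Map<Integer, Double> cache = new HashMap<>();

    synchronized double get(int order) {
        // the mapping function runs only when the key is absent,
        // so the expensive computation is skipped on a cache hit
        return cache.computeIfAbsent(order, this::compute);
    }

    private double compute(int order) {
        return order * 2.0; // stand-in for getNextLoss(getMargin(order), getLoss(order - 1))
    }
}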
@@ -277,7 +279,7 @@ public class LossCalculator {
/**
* Recursively compute all probabilities above the threshold
*
* <p>
* Dissertation, p. 48
*
* @param X

View File

@@ -16,6 +16,7 @@ import hep.dataforge.io.ColumnedDataWriter;
import hep.dataforge.meta.Meta;
import hep.dataforge.stat.fit.FitState;
import hep.dataforge.stat.fit.ParamSet;
import hep.dataforge.stat.fit.UpperLimitGenerator;
import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.Table;
import hep.dataforge.workspace.GenericTask;
@@ -34,8 +35,14 @@ public class NumassFitScanSummaryTask extends GenericTask {
protected void transform(WorkManager.Callback callback, Context context, TaskState state, Meta config) {
DataSet.Builder<Table> builder = DataSet.builder(Table.class);
Action<FitState, Table> action = new FitSummaryAction().withContext(context);
state.getData().getNode("fitscan").get().nodeStream().forEach(node ->
DataNode<FitState> data = state.getData().getCheckedNode("fitscan", FitState.class);
data.nodeStream().forEach(node ->
builder.putData(node.getName(), action.run((DataNode<FitState>) node, config).getData()));
// if (data.nodeStream().count() > 1) {
//merge tables if there is more than one
// }
state.finish(builder.build());
}
@@ -56,12 +63,23 @@
@Override
protected Table execute(String nodeName, Map<String, FitState> input, Meta meta) {
ListTable.Builder builder = new ListTable.Builder("msterile2", "U2", "U2err", "E0", "trap");
ListTable.Builder builder = new ListTable.Builder("msterile2", "U2", "U2err", "U2limit", "E0", "trap");
input.forEach((key, fitRes) -> {
ParamSet pars = fitRes.getParameters();
double u2Val = pars.getDouble("U2") / pars.getError("U2");
double limit;
if (Math.abs(u2Val) < 3) {
limit = UpperLimitGenerator.getConfidenceLimit(u2Val) * pars.getError("U2");
} else {
limit = Double.NaN;
}
builder.row(pars.getValue("msterile2"),
pars.getValue("U2"),
pars.getError("U2"),
limit,
pars.getValue("E0"),
pars.getValue("trap"));
});
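
The new U2limit column is filled only when the fitted U2 is compatible with zero at the three-sigma level; a clearly non-zero result leaves NaN there. A compact sketch of the branch, with the scale convention inferred from the multiplication by the error (not the committed code):

import hep.dataforge.stat.fit.UpperLimitGenerator;

// significance = fitted value in units of its error; below 3 sigma the result
// is treated as compatible with zero and a confidence upper limit is quoted
static double upperLimit(double value, double error) {
    double significance = value / error;
    return Math.abs(significance) < 3
            ? UpperLimitGenerator.getConfidenceLimit(significance) * error
            : Double.NaN;
}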

View File

@@ -49,7 +49,7 @@ public class NumassFitScanTask extends GenericTask {
overrideMeta.getNodes("params.param").stream()
.filter(par -> par.getString("name").equals(scanParameter)).forEach(par -> par.setValue("value", val));
// Data<Table> newData = new Data<Table>(data.getGoal(),data.dataType(),overrideMeta);
// Data<Table> newData = new Data<Table>(data.getGoal(),data.type(),overrideMeta);
DataNode node = action.run(DataNode.of("fit_" + i, data, Meta.empty()), overrideMeta);
resultBuilder.putData(data.getName() + ".fit_" + i, node.getData());
}

View File

@@ -1,16 +1,18 @@
package inr.numass.tasks;
import hep.dataforge.grind.JavaGrindLauncher;
import inr.numass.NumassWorkspaceSpec;
import java.io.File;
/**
* Created by darksnake on 12-Aug-16.
*/
public class GrindCaller {
public class NumassGrindLauncher {
public static void main(String[] args) throws Exception {
JavaGrindLauncher.buildWorkspace(new File("D:\\Work\\Numass\\sterile2016\\workspace.groovy")).runTask("numass.fitsum", "fill_2").computeAll();
JavaGrindLauncher.buildWorkspace(new File("D:\\Work\\Numass\\sterile2016\\workspace.groovy"), NumassWorkspaceSpec.class)
.runTask("numass.fitsum", "fill_3").computeAll();
}
}

View File

@@ -29,7 +29,11 @@ import hep.dataforge.storage.filestorage.FileEnvelope;
import hep.dataforge.storage.loaders.AbstractLoader;
import hep.dataforge.tables.Table;
import hep.dataforge.values.Value;
import static inr.numass.storage.RawNMPoint.MAX_EVENTS_PER_POINT;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.VFS;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
@@ -38,21 +42,13 @@ import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.text.ParseException;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Scanner;
import java.util.*;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import static inr.numass.storage.RawNMPoint.MAX_EVENTS_PER_POINT;
import static org.apache.commons.vfs2.FileType.FOLDER;
import org.apache.commons.vfs2.VFS;
import org.slf4j.LoggerFactory;
/**
* Reader for the numass main detector data directory or zip format.
@@ -76,6 +72,19 @@ public class NumassDataLoader extends AbstractLoader implements ObjectLoader<Env
* The beginning of hv fragment name
*/
public static final String HV_FRAGMENT_NAME = "voltage";
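// items are resolved lazily: each Supplier reads its envelope only when requested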
private final Map<String, Supplier<Envelope>> itemsProvider;
private NumassDataLoader(Storage storage, String name, Meta annotation) {
super(storage, name, annotation);
itemsProvider = new HashMap<>();
readOnly = true;
}
private NumassDataLoader(Storage storage, String name, Meta annotation, Map<String, Supplier<Envelope>> items) {
super(storage, name, annotation);
this.itemsProvider = items;
readOnly = true;
}
public static NumassDataLoader fromLocalDir(Storage storage, File directory) throws IOException {
return fromDir(storage, VFS.getManager().toFileObject(directory), null);
@@ -157,6 +166,29 @@ public class NumassDataLoader extends AbstractLoader implements ObjectLoader<Env
}
}
/**
 * Read the point start time from meta, stored e.g. as
 * "start_time": "2016-04-20T04:08:50"; falls back to Instant.EPOCH when absent.
 *
 * @param meta
 * @return
 */
private static Instant readTime(Meta meta) {
if (meta.hasValue("start_time")) {
return meta.getValue("start_time").timeValue();
} else {
return Instant.EPOCH;
}
}
private static Envelope readStream(InputStream stream) {
try {
return new DefaultEnvelopeReader().read(stream);
} catch (IOException ex) {
LoggerFactory.getLogger(NumassDataLoader.class).warn("Can't read a fragment from numass zip or directory", ex);
return null;
}
}
/**
* Read a numass point from an envelope and apply a transformation (e.g. debunching)
*
@@ -211,20 +243,6 @@ public class NumassDataLoader extends AbstractLoader implements ObjectLoader<Env
return transformation.apply(raw);
}
/**
* "start_time": "2016-04-20T04:08:50",
*
* @param meta
* @return
*/
private static Instant readTime(Meta meta) {
if (meta.hasValue("start_time")) {
return meta.getValue("start_time").timeValue();
} else {
return Instant.EPOCH;
}
}
/**
* Read numass point without transformation
*
@@ -235,29 +253,6 @@ public class NumassDataLoader extends AbstractLoader implements ObjectLoader<Env
return readPoint(envelope, (p) -> new NMPoint(p));
}
private static Envelope readStream(InputStream stream) {
try {
return new DefaultEnvelopeReader().read(stream);
} catch (IOException ex) {
LoggerFactory.getLogger(NumassDataLoader.class).warn("Can't read a fragment from numass zip or directory", ex);
return null;
}
}
private final Map<String, Supplier<Envelope>> itemsProvider;
private NumassDataLoader(Storage storage, String name, Meta annotation) {
super(storage, name, annotation);
itemsProvider = new HashMap<>();
readOnly = true;
}
private NumassDataLoader(Storage storage, String name, Meta annotation, Map<String, Supplier<Envelope>> items) {
super(storage, name, annotation);
this.itemsProvider = items;
readOnly = true;
}
private Map<String, Supplier<Envelope>> getItems() {
return itemsProvider;
}
@@ -284,7 +279,7 @@ public class NumassDataLoader extends AbstractLoader implements ObjectLoader<Env
}
return () -> {
try {
return new ColumnedDataReader(hvEnvelope.getData().getStream(), "timestamp", "block", "value").toDataSet();
return new ColumnedDataReader(hvEnvelope.getData().getStream(), "timestamp", "block", "value").toTable();
} catch (IOException ex) {
LoggerFactory.getLogger(getClass()).error("Failed to load HV data from file", ex);
return null;