Fix context and log issues in Grind

This commit is contained in:
Alexander Nozik 2016-12-18 17:11:24 +03:00
parent c63de01a42
commit 99449c82c9
13 changed files with 66 additions and 80 deletions

View File

@@ -11,8 +11,6 @@ dependencies {
compile group: 'commons-cli', name: 'commons-cli', version: '1.+'
compile group: 'commons-io', name: 'commons-io', version: '2.+'
compile project(':dataforge-stat:dataforge-minuit')
compile project(':dataforge-fx')
compile project(':dataforge-plots:plots-jfc')
compile project(':numass-storage')
compile project(':dataforge-grind:grind-terminal')
}

View File

@@ -3,6 +3,7 @@ package inr.numass
import hep.dataforge.context.Global
import hep.dataforge.grind.GrindWorkspaceBuilder
import hep.dataforge.grind.terminal.GrindTerminal
import inr.numass.workspace.*
/**
* Created by darksnake on 29-Aug-16.
@@ -22,10 +23,16 @@ println "Starting Grind shell"
if (cfgPath) {
try {
GrindTerminal.dumb().launch {
GrindWorkspaceBuilder numass = new GrindWorkspaceBuilder()
.withSpec(NumassWorkspaceSpec)
.from(new File(cfgPath))
shell.bind("numass", numass)
GrindWorkspaceBuilder numass = new GrindWorkspaceBuilder(it.shell.context).read(new File(cfgPath)).startup {
it.loadTask(NumassPrepareTask)
it.loadTask(NumassTableFilterTask)
it.loadTask(NumassFitScanTask)
it.loadTask(NumassSubstractEmptySourceTask)
it.loadTask(NumassFitScanSummaryTask)
it.loadTask(NumassFitTask)
it.loadTask(NumassFitSummaryTask)
}
it.shell.bind("numass", numass)
}
} catch (Exception ex) {
ex.printStackTrace();

View File

@@ -1,41 +0,0 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass
import groovy.transform.CompileStatic
import hep.dataforge.grind.WorkspaceSpec
import inr.numass.workspace.*
/**
* Created by darksnake on 16-Aug-16.
*/
// NOTE(review): this class is shown inside a deleted-file hunk — this commit
// removes it; the launcher hunk earlier in the diff registers the same tasks
// directly via loadTask(...) instead of going through a WorkspaceSpec.
@CompileStatic
class NumassWorkspaceSpec extends WorkspaceSpec {
// Registers every standard Numass analysis task with the workspace at
// construction time, so a workspace built from this spec can run the full
// prepare/filter/fit pipeline without further setup.
NumassWorkspaceSpec() {
//load tasks
super.task(NumassPrepareTask)
super.task(NumassTableFilterTask)
super.task(NumassFitScanTask)
// NOTE(review): "Substract" is a misspelling of "Subtract" carried in the
// task class name itself — renaming would be a cross-file change.
super.task(NumassSubstractEmptySourceTask)
super.task(NumassFitScanSummaryTask)
super.task(NumassFitTask)
super.task(NumassFitSummaryTask)
}
}

View File

@@ -7,7 +7,7 @@ import hep.dataforge.grind.GrindWorkspaceBuilder
*/
new GrindWorkspaceBuilder().from {
new GrindWorkspaceBuilder().read {
new File("D:\\Work\\Numass\\sterile2016\\workspace.groovy")
}.runTask("numass.prepare", "fill_2").computeAll()

View File

@@ -44,7 +44,7 @@ import org.apache.commons.math3.util.FastMath;
* @author Alexander Nozik
*/
@PluginDef(group = "inr.numass", name = "numass",
dependsOn = {"hep.dataforge:actions", "hep.dataforge:math", "hep.dataforge:MINUIT", "hep.dataforge:plots-jfc"},
dependsOn = {"hep.dataforge:actions", "hep.dataforge:math", "hep.dataforge:MINUIT"},
description = "Numass data analysis tools")
public class NumassPlugin extends BasicPlugin {

View File

@@ -37,7 +37,7 @@ import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;
import static inr.numass.utils.TritiumUtils.evaluateExpression;
import static inr.numass.utils.TritiumUtils.pointExpression;
/**
* @author Darksnake
@@ -80,7 +80,7 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
if (meta.hasValue("correction")) {
final String correction = meta.getString("correction");
corrections.add((point) -> evaluateExpression(point, correction));
corrections.add((point) -> pointExpression(correction, point));
}
List<DataPoint> dataList = new ArrayList<>();
@@ -153,7 +153,7 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
return new Correction() {
@Override
public double corr(NMPoint point) {
return evaluateExpression(point, expr);
return pointExpression(expr, point);
}
@Override
@@ -161,7 +161,7 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
if (errExpr.isEmpty()) {
return 0;
} else {
return evaluateExpression(point, errExpr);
return pointExpression(errExpr, point);
}
}
};
@@ -202,7 +202,7 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
private final Function<NMPoint, Double> deadTimeFunction;
public DeadTimeCorrection(String expr) {
deadTimeFunction = point -> evaluateExpression(point, expr);
deadTimeFunction = point -> pointExpression(expr, point);
}
@Override

View File

@@ -15,21 +15,23 @@
*/
package inr.numass.models;
import hep.dataforge.stat.parametric.AbstractParametricFunction;
import static hep.dataforge.stat.parametric.FunctionUtils.getSpectrumDerivativeFunction;
import static hep.dataforge.stat.parametric.FunctionUtils.getSpectrumFunction;
import hep.dataforge.stat.parametric.ParametricFunction;
import hep.dataforge.maths.MathUtils;
import hep.dataforge.maths.NamedVector;
import hep.dataforge.names.AbstractNamedSet;
import hep.dataforge.stat.parametric.AbstractParametricFunction;
import hep.dataforge.stat.parametric.ParametricFunction;
import hep.dataforge.values.NamedValueSet;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import org.apache.commons.math3.analysis.UnivariateFunction;
import org.apache.commons.math3.analysis.interpolation.SplineInterpolator;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import static hep.dataforge.stat.parametric.FunctionUtils.getSpectrumDerivativeFunction;
import static hep.dataforge.stat.parametric.FunctionUtils.getSpectrumFunction;
/**
*
* @author Darksnake
@@ -52,7 +54,7 @@ public class NamedSpectrumCaching extends AbstractParametricFunction {
this.a = a;
this.b = b;
this.source = spectrum;
spectrumDerivCache = new HashMap<>(source.getDimension());
spectrumDerivCache = new HashMap<>(source.size());
// spectrumDerivCache = new CacheElement[source.getDimension()];
}

View File

@@ -41,7 +41,7 @@ public class NumassTransmission extends AbstractParametricBiFunction {
Map<String, Object> binding = new HashMap<>();
binding.put("Ei", Ei);
binding.put("Ef", Ef);
return ExpressionUtils.evaluate(trapFuncStr, binding);
return ExpressionUtils.function(trapFuncStr, binding);
};
}
} else {

View File

@@ -8,7 +8,7 @@ package inr.numass.utils;
import groovy.lang.Binding;
import groovy.lang.GroovyShell;
import groovy.lang.Script;
import hep.dataforge.utils.Utils;
import hep.dataforge.utils.Misc;
import org.codehaus.groovy.control.CompilerConfiguration;
import org.codehaus.groovy.control.customizers.ImportCustomizer;
@@ -18,7 +18,7 @@ import java.util.Map;
* @author Alexander Nozik
*/
public class ExpressionUtils {
private static final Map<String, Script> cache = Utils.getLRUCache(100);
private static final Map<String, Script> cache = Misc.getLRUCache(100);
private static final GroovyShell shell;
static {
@@ -38,7 +38,7 @@ public class ExpressionUtils {
}
public static double evaluate(String expression, Map<String, Object> binding) {
public static double function(String expression, Map<String, ? extends Object> binding) {
synchronized (cache) {
Binding b = new Binding(binding);
Script script = getScript(expression);
@@ -46,4 +46,13 @@ public class ExpressionUtils {
return ((Number) script.run()).doubleValue();
}
}
/**
 * Evaluates a Groovy expression as a boolean condition against the given
 * variable binding, reusing compiled {@link Script} instances from the
 * shared expression cache.
 *
 * @param expression Groovy source text of the condition
 * @param binding    variables made visible to the expression while it runs
 * @return the expression's result cast to {@code boolean}
 */
public static boolean condition(String expression, Map<String, ? extends Object> binding){
// Locked on the cache: the cached Script objects are shared, and swapping
// their Binding is not safe under concurrent evaluation.
// NOTE(review): mirrors the locking in function(...) above — confirm the
// coarse per-cache lock (serializing all evaluations) is intended.
synchronized (cache) {
Binding b = new Binding(binding);
Script script = getScript(expression);
script.setBinding(b);
// Throws ClassCastException (or NullPointerException on a null result)
// if the expression does not evaluate to a Boolean.
return (boolean) script.run();
}
}
}

View File

@@ -129,16 +129,16 @@ public class TritiumUtils {
/**
* Evaluate groovy expression using numass point as parameter
*
* @param point
* @param expression
* @param point
* @return
*/
public static double evaluateExpression(NMPoint point, String expression) {
public static double pointExpression(String expression, NMPoint point) {
Map<String, Object> exprParams = new HashMap<>();
exprParams.put("T", point.getLength());
exprParams.put("U", point.getUread());
exprParams.put("cr", ((double) point.getEventsCount()) / point.getLength());
exprParams.put("point", point);
return ExpressionUtils.evaluate(expression, exprParams);
return ExpressionUtils.function(expression, exprParams);
}
}

View File

@@ -30,7 +30,7 @@ public class UnderflowCorrection {
public double get(Logable log, Meta meta, NMPoint point) {
if (point.getUset() >= meta.getDouble("underflow.threshold", 17000)) {
if (meta.hasValue("underflow.function")) {
return TritiumUtils.evaluateExpression(point, meta.getString("underflow.function"));
return TritiumUtils.pointExpression(meta.getString("underflow.function"), point);
} else {
return 1;
}

View File

@@ -7,10 +7,14 @@ import hep.dataforge.data.DataNode;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.meta.Laminate;
import hep.dataforge.meta.MetaBuilder;
import hep.dataforge.tables.DataPoint;
import hep.dataforge.tables.Table;
import hep.dataforge.tables.TableTransform;
import hep.dataforge.workspace.SingleActionTask;
import hep.dataforge.workspace.TaskModel;
import inr.numass.utils.ExpressionUtils;
import java.util.function.Predicate;
/**
* Created by darksnake on 13-Aug-16.
@ -47,10 +51,17 @@ public class NumassTableFilterTask extends SingleActionTask<Table, Table> {
private class FilterTableAction extends OneToOneAction<Table, Table> {
@Override
protected Table execute(Context context, String name, Table input, Laminate inputMeta) {
if (inputMeta.hasValue("filter.from") || inputMeta.hasValue("filter.to")) {
double uLo = inputMeta.getDouble("filter.from", 0);
double uHi = inputMeta.getDouble("filter.to", Double.POSITIVE_INFINITY);
getLogger(inputMeta).debug("Filtering finished");
return TableTransform.filter(input, "Uset", uLo, uHi);
} else if (inputMeta.hasValue("filter.condition")) {
Predicate<DataPoint> predicate = (dp) -> ExpressionUtils.condition(inputMeta.getString("filter.condition"), dp.asMap());
return TableTransform.filter(input, predicate);
} else {
throw new RuntimeException("No filtering condition specified");
}
}
}
}

View File

@@ -6,11 +6,11 @@
package inr.numass.actions;
import inr.numass.utils.ExpressionUtils;
import org.junit.Assert;
import org.junit.Test;
import java.util.HashMap;
import java.util.Map;
import org.junit.Assert;
import org.junit.Test;
/**
*
@@ -25,7 +25,7 @@ public class PrepareDataActionTest {
public void testExpression() {
Map<String, Object> exprParams = new HashMap<>();
exprParams.put("U", 18000d);
double correctionFactor = ExpressionUtils.evaluate("1 + 13.265 * exp(- U / 2343.4)", exprParams);
double correctionFactor = ExpressionUtils.function("1 + 13.265 * exp(- U / 2343.4)", exprParams);
Assert.assertEquals("Testing expression calculation", 1.006125, correctionFactor, 1e-5);
}