Fix context and log issues in Grind
parent c63de01a42
commit 99449c82c9
@@ -11,8 +11,6 @@ dependencies {
     compile group: 'commons-cli', name: 'commons-cli', version: '1.+'
     compile group: 'commons-io', name: 'commons-io', version: '2.+'
     compile project(':dataforge-stat:dataforge-minuit')
-    compile project(':dataforge-fx')
-    compile project(':dataforge-plots:plots-jfc')
     compile project(':numass-storage')
     compile project(':dataforge-grind:grind-terminal')
 }
@@ -3,6 +3,7 @@ package inr.numass
 import hep.dataforge.context.Global
 import hep.dataforge.grind.GrindWorkspaceBuilder
 import hep.dataforge.grind.terminal.GrindTerminal
+import inr.numass.workspace.*
 
 /**
  * Created by darksnake on 29-Aug-16.
@@ -19,19 +20,25 @@ println "Loading config file from $cfgPath"
 //Global.instance().pluginManager().loadPlugin("inr.numass:numass")
 println "Starting Grind shell"
 
-if(cfgPath) {
+if (cfgPath) {
     try {
         GrindTerminal.dumb().launch {
-            GrindWorkspaceBuilder numass = new GrindWorkspaceBuilder()
-                    .withSpec(NumassWorkspaceSpec)
-                    .from(new File(cfgPath))
-            shell.bind("numass", numass)
+            GrindWorkspaceBuilder numass = new GrindWorkspaceBuilder(it.shell.context).read(new File(cfgPath)).startup {
+                it.loadTask(NumassPrepareTask)
+                it.loadTask(NumassTableFilterTask)
+                it.loadTask(NumassFitScanTask)
+                it.loadTask(NumassSubstractEmptySourceTask)
+                it.loadTask(NumassFitScanSummaryTask)
+                it.loadTask(NumassFitTask)
+                it.loadTask(NumassFitSummaryTask)
+            }
+            it.shell.bind("numass", numass)
         }
     } catch (Exception ex) {
         ex.printStackTrace();
     } finally {
         Global.terminate();
     }
-} else{
+} else {
     println "No configuration path. Provide path via --config option"
 }
@@ -1,41 +0,0 @@
-/*
- * Copyright 2015 Alexander Nozik.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package inr.numass
-
-import groovy.transform.CompileStatic
-import hep.dataforge.grind.WorkspaceSpec
-import inr.numass.workspace.*
-
-/**
- * Created by darksnake on 16-Aug-16.
- */
-@CompileStatic
-class NumassWorkspaceSpec extends WorkspaceSpec {
-
-    NumassWorkspaceSpec() {
-        //load tasks
-        super.task(NumassPrepareTask)
-        super.task(NumassTableFilterTask)
-        super.task(NumassFitScanTask)
-        super.task(NumassSubstractEmptySourceTask)
-        super.task(NumassFitScanSummaryTask)
-        super.task(NumassFitTask)
-        super.task(NumassFitSummaryTask)
-    }
-
-
-}
@@ -7,7 +7,7 @@ import hep.dataforge.grind.GrindWorkspaceBuilder
  */
 
 
-new GrindWorkspaceBuilder().from {
+new GrindWorkspaceBuilder().read {
     new File("D:\\Work\\Numass\\sterile2016\\workspace.groovy")
 }.runTask("numass.prepare", "fill_2").computeAll()
 
@@ -44,7 +44,7 @@ import org.apache.commons.math3.util.FastMath;
  * @author Alexander Nozik
  */
 @PluginDef(group = "inr.numass", name = "numass",
-        dependsOn = {"hep.dataforge:actions", "hep.dataforge:math", "hep.dataforge:MINUIT", "hep.dataforge:plots-jfc"},
+        dependsOn = {"hep.dataforge:actions", "hep.dataforge:math", "hep.dataforge:MINUIT"},
         description = "Numass data analysis tools")
 public class NumassPlugin extends BasicPlugin {
 
@@ -37,7 +37,7 @@ import java.util.List;
 import java.util.function.Function;
 import java.util.stream.Collectors;
 
-import static inr.numass.utils.TritiumUtils.evaluateExpression;
+import static inr.numass.utils.TritiumUtils.pointExpression;
 
 /**
  * @author Darksnake
@@ -80,7 +80,7 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
 
         if (meta.hasValue("correction")) {
             final String correction = meta.getString("correction");
-            corrections.add((point) -> evaluateExpression(point, correction));
+            corrections.add((point) -> pointExpression(correction, point));
         }
 
         List<DataPoint> dataList = new ArrayList<>();
@@ -153,7 +153,7 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
         return new Correction() {
             @Override
             public double corr(NMPoint point) {
-                return evaluateExpression(point, expr);
+                return pointExpression(expr, point);
             }
 
             @Override
@@ -161,7 +161,7 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
                 if (errExpr.isEmpty()) {
                     return 0;
                 } else {
-                    return evaluateExpression(point, errExpr);
+                    return pointExpression(errExpr, point);
                 }
             }
         };
@@ -202,7 +202,7 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
         private final Function<NMPoint, Double> deadTimeFunction;
 
         public DeadTimeCorrection(String expr) {
-            deadTimeFunction = point -> evaluateExpression(point, expr);
+            deadTimeFunction = point -> pointExpression(expr, point);
         }
 
         @Override
@@ -15,21 +15,23 @@
  */
 package inr.numass.models;
 
-import hep.dataforge.stat.parametric.AbstractParametricFunction;
-import static hep.dataforge.stat.parametric.FunctionUtils.getSpectrumDerivativeFunction;
-import static hep.dataforge.stat.parametric.FunctionUtils.getSpectrumFunction;
-import hep.dataforge.stat.parametric.ParametricFunction;
 import hep.dataforge.maths.MathUtils;
 import hep.dataforge.maths.NamedVector;
 import hep.dataforge.names.AbstractNamedSet;
+import hep.dataforge.stat.parametric.AbstractParametricFunction;
+import hep.dataforge.stat.parametric.ParametricFunction;
 import hep.dataforge.values.NamedValueSet;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Objects;
 import org.apache.commons.math3.analysis.UnivariateFunction;
 import org.apache.commons.math3.analysis.interpolation.SplineInterpolator;
 import org.slf4j.LoggerFactory;
 
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+
+import static hep.dataforge.stat.parametric.FunctionUtils.getSpectrumDerivativeFunction;
+import static hep.dataforge.stat.parametric.FunctionUtils.getSpectrumFunction;
+
 /**
  *
  * @author Darksnake
@@ -52,7 +54,7 @@ public class NamedSpectrumCaching extends AbstractParametricFunction {
         this.a = a;
         this.b = b;
         this.source = spectrum;
-        spectrumDerivCache = new HashMap<>(source.getDimension());
+        spectrumDerivCache = new HashMap<>(source.size());
 //        spectrumDerivCache = new CacheElement[source.getDimension()];
     }
 
@@ -41,7 +41,7 @@ public class NumassTransmission extends AbstractParametricBiFunction {
                 Map<String, Object> binding = new HashMap<>();
                 binding.put("Ei", Ei);
                 binding.put("Ef", Ef);
-                return ExpressionUtils.evaluate(trapFuncStr, binding);
+                return ExpressionUtils.function(trapFuncStr, binding);
             };
         }
     } else {
@@ -8,7 +8,7 @@ package inr.numass.utils;
 import groovy.lang.Binding;
 import groovy.lang.GroovyShell;
 import groovy.lang.Script;
-import hep.dataforge.utils.Utils;
+import hep.dataforge.utils.Misc;
 import org.codehaus.groovy.control.CompilerConfiguration;
 import org.codehaus.groovy.control.customizers.ImportCustomizer;
 
@@ -18,7 +18,7 @@ import java.util.Map;
  * @author Alexander Nozik
  */
 public class ExpressionUtils {
-    private static final Map<String, Script> cache = Utils.getLRUCache(100);
+    private static final Map<String, Script> cache = Misc.getLRUCache(100);
     private static final GroovyShell shell;
 
     static {
@@ -38,7 +38,7 @@ public class ExpressionUtils {
     }
 
 
-    public static double evaluate(String expression, Map<String, Object> binding) {
+    public static double function(String expression, Map<String, ? extends Object> binding) {
         synchronized (cache) {
             Binding b = new Binding(binding);
             Script script = getScript(expression);
@@ -46,4 +46,13 @@
             return ((Number) script.run()).doubleValue();
         }
     }
+
+    public static boolean condition(String expression, Map<String, ? extends Object> binding){
+        synchronized (cache) {
+            Binding b = new Binding(binding);
+            Script script = getScript(expression);
+            script.setBinding(b);
+            return (boolean) script.run();
+        }
+    }
 }
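
Note: a minimal usage sketch of the renamed ExpressionUtils entry points; the caller class and the binding values below are illustrative and not part of the change itself.

    import inr.numass.utils.ExpressionUtils;

    import java.util.HashMap;
    import java.util.Map;

    // Hypothetical caller, shown only to illustrate the renamed API:
    // evaluate(...) has become function(...), and condition(...) is new.
    class ExpressionUtilsUsageSketch {
        public static void main(String[] args) {
            Map<String, Object> binding = new HashMap<>();
            binding.put("U", 18000d);

            // numeric expression, as exercised by PrepareDataActionTest
            double factor = ExpressionUtils.function("1 + 13.265 * exp(- U / 2343.4)", binding);

            // boolean expression via the new condition(...) helper
            boolean aboveThreshold = ExpressionUtils.condition("U > 17000", binding);

            System.out.println(factor + " " + aboveThreshold);
        }
    }
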
@@ -129,16 +129,16 @@ public class TritiumUtils {
     /**
      * Evaluate groovy expression using numass point as parameter
      *
-     * @param point
      * @param expression
+     * @param point
      * @return
      */
-    public static double evaluateExpression(NMPoint point, String expression) {
+    public static double pointExpression(String expression, NMPoint point) {
         Map<String, Object> exprParams = new HashMap<>();
         exprParams.put("T", point.getLength());
         exprParams.put("U", point.getUread());
         exprParams.put("cr", ((double) point.getEventsCount()) / point.getLength());
         exprParams.put("point", point);
-        return ExpressionUtils.evaluate(expression, exprParams);
+        return ExpressionUtils.function(expression, exprParams);
     }
 }
@@ -30,7 +30,7 @@ public class UnderflowCorrection {
     public double get(Logable log, Meta meta, NMPoint point) {
         if (point.getUset() >= meta.getDouble("underflow.threshold", 17000)) {
             if (meta.hasValue("underflow.function")) {
-                return TritiumUtils.evaluateExpression(point, meta.getString("underflow.function"));
+                return TritiumUtils.pointExpression(meta.getString("underflow.function"), point);
             } else {
                 return 1;
             }
@@ -7,10 +7,14 @@ import hep.dataforge.data.DataNode;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.meta.Laminate;
 import hep.dataforge.meta.MetaBuilder;
+import hep.dataforge.tables.DataPoint;
 import hep.dataforge.tables.Table;
 import hep.dataforge.tables.TableTransform;
 import hep.dataforge.workspace.SingleActionTask;
 import hep.dataforge.workspace.TaskModel;
+import inr.numass.utils.ExpressionUtils;
 
+import java.util.function.Predicate;
+
 /**
  * Created by darksnake on 13-Aug-16.
@@ -47,10 +51,17 @@ public class NumassTableFilterTask extends SingleActionTask<Table, Table> {
     private class FilterTableAction extends OneToOneAction<Table, Table> {
         @Override
        protected Table execute(Context context, String name, Table input, Laminate inputMeta) {
-            double uLo = inputMeta.getDouble("filter.from", 0);
-            double uHi = inputMeta.getDouble("filter.to", Double.POSITIVE_INFINITY);
-            getLogger(inputMeta).debug("Filtering finished");
-            return TableTransform.filter(input, "Uset", uLo, uHi);
+            if (inputMeta.hasValue("filter.from") || inputMeta.hasValue("filter.to")) {
+                double uLo = inputMeta.getDouble("filter.from", 0);
+                double uHi = inputMeta.getDouble("filter.to", Double.POSITIVE_INFINITY);
+                getLogger(inputMeta).debug("Filtering finished");
+                return TableTransform.filter(input, "Uset", uLo, uHi);
+            } else if (inputMeta.hasValue("filter.condition")) {
+                Predicate<DataPoint> predicate = (dp) -> ExpressionUtils.condition(inputMeta.getString("filter.condition"), dp.asMap());
+                return TableTransform.filter(input, predicate);
+            } else {
+                throw new RuntimeException("No filtering condition specified");
+            }
         }
     }
 }
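
Note: a sketch of how the new "filter.condition" branch builds its predicate; only the wrapper class and the example condition string are hypothetical, the call itself mirrors the line added above.

    import hep.dataforge.tables.DataPoint;
    import inr.numass.utils.ExpressionUtils;

    import java.util.function.Predicate;

    // Hypothetical helper mirroring the predicate created inside FilterTableAction:
    // each point's column values (dp.asMap()) become the Groovy binding for the condition.
    class ConditionFilterSketch {
        static Predicate<DataPoint> forCondition(String condition) {
            return dp -> ExpressionUtils.condition(condition, dp.asMap());
        }

        // Usage, e.g. forCondition("Uset > 14000") with an illustrative column name.
    }
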
@@ -6,11 +6,11 @@
 package inr.numass.actions;
 
 import inr.numass.utils.ExpressionUtils;
+import org.junit.Assert;
+import org.junit.Test;
+
 import java.util.HashMap;
 import java.util.Map;
-import org.junit.Assert;
-
-import org.junit.Test;
 
 /**
  *
@@ -25,7 +25,7 @@ public class PrepareDataActionTest {
     public void testExpression() {
         Map<String, Object> exprParams = new HashMap<>();
         exprParams.put("U", 18000d);
-        double correctionFactor = ExpressionUtils.evaluate("1 + 13.265 * exp(- U / 2343.4)", exprParams);
+        double correctionFactor = ExpressionUtils.function("1 + 13.265 * exp(- U / 2343.4)", exprParams);
         Assert.assertEquals("Testing expression calculation", 1.006125, correctionFactor, 1e-5);
     }
 