Logging dependencies cleanup

Alexander Nozik 2016-11-14 10:58:38 +03:00
parent 58053812c4
commit c57d1e26ef
4 changed files with 8 additions and 10 deletions

View File

@@ -53,7 +53,7 @@ public class MergeDataAction extends ManyToOneAction<Table, Table> {
@Override
protected Table execute(String nodeName, Map<String, Table> data, Meta meta) {
Table res = mergeDataSets(nodeName, data.values());
- return new ListTable(res.getFormat(), res.sort("Uset", true));
+ return new ListTable(res.getFormat(), TableTransform.sort(res, "Uset", true));
}
@Override
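
The change in this hunk is the pattern repeated throughout the commit: sorting moves from an instance method on Table to the static TableTransform helper. A minimal before/after sketch, assuming only the signatures visible in this diff (variable names are illustrative):

    Table res = mergeDataSets(nodeName, data.values());

    // old call style: sort was an instance method of Table
    // Table sorted = res.sort("Uset", true);

    // new call style: sort is a static method of TableTransform,
    // taking the table as its first argument
    Table sorted = TableTransform.sort(res, "Uset", true);

    return new ListTable(res.getFormat(), sorted);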

View File

@@ -22,10 +22,7 @@ import hep.dataforge.exceptions.ContentException;
import hep.dataforge.io.ColumnedDataWriter;
import hep.dataforge.meta.Laminate;
import hep.dataforge.meta.Meta;
- import hep.dataforge.tables.DataPoint;
- import hep.dataforge.tables.ListTable;
- import hep.dataforge.tables.MapPoint;
- import hep.dataforge.tables.Table;
+ import hep.dataforge.tables.*;
import hep.dataforge.values.Value;
import javafx.util.Pair;
import org.apache.commons.math3.analysis.interpolation.SplineInterpolator;
@@ -40,7 +37,6 @@ import java.util.TreeMap;
import java.util.concurrent.CopyOnWriteArrayList;
/**
*
* @author Darksnake
*/
@TypedActionDef(name = "monitor", inputType = Table.class, outputType = Table.class)
@@ -147,7 +143,7 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
double[] ys = new double[index.size()];
int i = 0;
for (Entry<Instant, DataPoint> entry : index.entrySet()) {
xs[i] = (double) entry.getKey().toEpochMilli();
ys[i] = getCR(entry.getValue()) / norm;
@@ -200,7 +196,7 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
String monitorFileName = meta.getString("monitorFile", "monitor");
OutputStream stream = buildActionOutput(monitorFileName);
ListTable data = new ListTable(monitorPoints);
- ColumnedDataWriter.writeDataSet(stream, data.sort("Timestamp", true), "Monitor points", monitorNames);
+ ColumnedDataWriter.writeDataSet(stream, TableTransform.sort(data, "Timestamp", true), "Monitor points", monitorNames);
}
}

View File

@@ -18,6 +18,7 @@ import hep.dataforge.stat.fit.ParamSet;
import hep.dataforge.stat.fit.UpperLimitGenerator;
import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.Table;
+ import hep.dataforge.tables.TableTransform;
import hep.dataforge.workspace.AbstractTask;
import hep.dataforge.workspace.TaskModel;
@@ -81,7 +82,7 @@ public class NumassFitScanSummaryTask extends AbstractTask<Table> {
pars.getValue("E0"),
pars.getValue("trap"));
});
- Table res = builder.build().sort("msterile2", true);
+ Table res = TableTransform.sort(builder.build(), "msterile2", true);
OutputStream stream = buildActionOutput(nodeName);

View File

@@ -7,6 +7,7 @@ import hep.dataforge.description.TypedActionDef;
import hep.dataforge.meta.Laminate;
import hep.dataforge.meta.MetaBuilder;
import hep.dataforge.tables.Table;
+ import hep.dataforge.tables.TableTransform;
import hep.dataforge.workspace.SingleActionTask;
import hep.dataforge.workspace.TaskModel;
@@ -48,7 +49,7 @@ public class NumassTableFilterTask extends SingleActionTask<Table, Table> {
double uLo = inputMeta.getDouble("filter.from", 0);
double uHi = inputMeta.getDouble("filter.to", Double.POSITIVE_INFINITY);
getLogger().debug("Filtering finished");
- return input.filter("Uset", uLo, uHi);
+ return TableTransform.filter(input, "Uset", uLo, uHi);
}
}
}
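
The last hunk applies the same refactoring to filtering: the instance call Table.filter("Uset", uLo, uHi) becomes the static TableTransform.filter(input, "Uset", uLo, uHi). A hedged usage sketch, reusing only names that appear in this hunk:

    // bounds read from the task configuration, as in the hunk above
    double uLo = inputMeta.getDouble("filter.from", 0);
    double uHi = inputMeta.getDouble("filter.to", Double.POSITIVE_INFINITY);

    // old: filter as an instance method on the input table
    // Table filtered = input.filter("Uset", uLo, uHi);

    // new: static helper with the table passed explicitly
    Table filtered = TableTransform.filter(input, "Uset", uLo, uHi);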