Trying to fix deadlock in KAction. Failed for now.

This commit is contained in:
Alexander Nozik 2017-12-07 17:03:42 +03:00
parent d9bb3ada0a
commit 2556cf36f8
7 changed files with 88 additions and 26 deletions

View File

@@ -10,7 +10,6 @@ import hep.dataforge.io.MetaFileReader
import hep.dataforge.io.XMLMetaReader import hep.dataforge.io.XMLMetaReader
import hep.dataforge.meta.Meta import hep.dataforge.meta.Meta
import hep.dataforge.storage.commons.StorageConnection import hep.dataforge.storage.commons.StorageConnection
import hep.dataforge.storage.commons.StorageFactory
import hep.dataforge.storage.commons.StorageManager import hep.dataforge.storage.commons.StorageManager
import inr.numass.client.ClientUtils import inr.numass.client.ClientUtils
import javafx.application.Application import javafx.application.Application

View File

@@ -3,7 +3,7 @@ buildscript {
mavenCentral() mavenCentral()
} }
dependencies { dependencies {
classpath 'com.google.protobuf:protobuf-gradle-plugin:0.8.1' classpath 'com.google.protobuf:protobuf-gradle-plugin:0.8.3'
} }
} }
@@ -14,14 +14,17 @@ description = "A bse package with minimal dependencies for numass"
dependencies { dependencies {
compile "hep.dataforge:dataforge-storage" //project(':dataforge-storage') compile "hep.dataforge:dataforge-storage" //project(':dataforge-storage')
compile 'com.google.protobuf:protobuf-java:3.3.0' compile 'com.google.protobuf:protobuf-java:3.5.0'
// https://mvnrepository.com/artifact/com.github.robtimus/sftp-fs
compile group: 'com.github.robtimus', name: 'sftp-fs', version: '1.1.1'
} }
protobuf { protobuf {
// Configure the protoc executable // Configure the protoc executable
protoc { protoc {
// Download from repositories // Download from repositories
artifact = 'com.google.protobuf:protoc:3.3.0' artifact = 'com.google.protobuf:protoc:3.5.0'
} }
generatedFilesBaseDir = "$projectDir/gen" generatedFilesBaseDir = "$projectDir/gen"
} }

View File

@@ -4,6 +4,8 @@ import hep.dataforge.context.Context;
import hep.dataforge.data.DataFactory; import hep.dataforge.data.DataFactory;
import hep.dataforge.data.DataTree; import hep.dataforge.data.DataTree;
import hep.dataforge.meta.Meta; import hep.dataforge.meta.Meta;
import hep.dataforge.storage.api.Storage;
import hep.dataforge.storage.commons.StorageManager;
import hep.dataforge.storage.commons.StorageUtils; import hep.dataforge.storage.commons.StorageUtils;
import inr.numass.data.api.NumassSet; import inr.numass.data.api.NumassSet;
@@ -24,7 +26,8 @@ public class NumassDataFactory extends DataFactory<NumassSet> {
@Override @Override
protected void fill(DataTree.Builder<NumassSet> builder, Context context, Meta meta) { protected void fill(DataTree.Builder<NumassSet> builder, Context context, Meta meta) {
NumassStorage storage = new NumassStorage(context,meta); Meta newMeta = meta.getBuilder().setValue("type", "numass");
Storage storage = context.loadFeature("hep.dataforge:storage", StorageManager.class).buildStorage(newMeta);
StorageUtils.loaderStream(storage).forEach(loader -> { StorageUtils.loaderStream(storage).forEach(loader -> {
if (loader instanceof NumassSet) { if (loader instanceof NumassSet) {
builder.putStatic(loader.getFullName().toUnescaped(), (NumassSet) loader); builder.putStatic(loader.getFullName().toUnescaped(), (NumassSet) loader);

View File

@@ -52,13 +52,13 @@ public class NumassStorage extends FileStorage {
public static final String NUMASS_ZIP_EXTENSION = ".nm.zip"; public static final String NUMASS_ZIP_EXTENSION = ".nm.zip";
public static final String NUMASS_DATA_LOADER_TYPE = "numassData"; public static final String NUMASS_DATA_LOADER_TYPE = "numassData";
protected NumassStorage(FileStorage parent, String path, Meta config) throws StorageException { protected NumassStorage(FileStorage parent, Meta config, String shelf) throws StorageException {
super(parent, path, config); super(parent, config, shelf);
super.refresh(); super.refresh();
} }
public NumassStorage(Context context, Meta config) throws StorageException { public NumassStorage(Context context, Meta config, Path path) throws StorageException {
super(context, config); super(context, config, path);
super.refresh(); super.refresh();
} }
@@ -75,7 +75,7 @@ NumassDataLoader.fromDir(this, file, null)); NumassDataLoader.fromDir(this, file, null));
NumassDataLoader.fromDir(this, file, null)); NumassDataLoader.fromDir(this, file, null));
} else { } else {
this.shelves.put(entryName(file), this.shelves.put(entryName(file),
new NumassStorage(this, entryName(file), getMeta())); new NumassStorage(this, getMeta(), entryName(file)));
} }
} else if (file.getFileName().endsWith(NUMASS_ZIP_EXTENSION)) { } else if (file.getFileName().endsWith(NUMASS_ZIP_EXTENSION)) {
this.loaders.put(entryName(file), NumassDataLoader.fromFile(this, file)); this.loaders.put(entryName(file), NumassDataLoader.fromFile(this, file));
@@ -134,8 +134,8 @@ public class NumassStorage extends FileStorage {
} }
@Override @Override
public NumassStorage createShelf(String path, Meta meta) throws StorageException { public NumassStorage createShelf(Meta meta, String path) throws StorageException {
return new NumassStorage(this, path, meta); return new NumassStorage(this, meta, path);
} }
/** /**
@@ -166,6 +166,17 @@ public class NumassStorage extends FileStorage {
return getMeta().getString("description", ""); return getMeta().getString("description", "");
} }
@Override
public void close() throws Exception {
super.close();
//close remote file system after use
try {
getDataDir().getFileSystem().close();
} catch (UnsupportedOperationException ex) {
}
}
public static class NumassDataPointEvent extends Event { public static class NumassDataPointEvent extends Event {
public static final String FILE_NAME_KEY = "fileName"; public static final String FILE_NAME_KEY = "fileName";

View File

@@ -1,35 +1,40 @@
package inr.numass.data.storage; package inr.numass.data.storage;
import com.github.robtimus.filesystems.sftp.SFTPEnvironment;
import hep.dataforge.context.Context; import hep.dataforge.context.Context;
import hep.dataforge.context.Global; import hep.dataforge.context.Global;
import hep.dataforge.meta.Meta; import hep.dataforge.meta.Meta;
import hep.dataforge.meta.MetaBuilder; import hep.dataforge.meta.MetaBuilder;
import hep.dataforge.storage.api.Storage; import hep.dataforge.storage.api.Storage;
import hep.dataforge.storage.api.StorageType; import hep.dataforge.storage.api.StorageType;
import org.jetbrains.annotations.NotNull;
import java.io.File; import java.io.File;
import java.net.URI;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Path;
import java.nio.file.Paths;
/** /**
* Created by darksnake on 17-May-17. * Created by darksnake on 17-May-17.
*/ */
public class NumassStorageFactory implements StorageType { public class NumassStorageFactory implements StorageType {
public static MetaBuilder buildStorageMeta(String path, boolean readOnly, boolean monitor){
return new MetaBuilder("storage")
.setValue("path", path)
.setValue("type", "numass")
.setValue("readOnly", readOnly)
.setValue("monitor", monitor);
}
/** /**
* Build local storage with Global context. Used for tests. * Build local storage with Global context. Used for tests.
*
* @param file * @param file
* @return * @return
*/ */
public static NumassStorage buildLocal(File file) { @NotNull
return new NumassStorage(Global.instance(), public static NumassStorage buildLocal(File file, boolean monitor) {
new MetaBuilder("storage").setValue("path", file.toPath())); Path path = file.toPath();
Meta meta = new MetaBuilder("storage")
.setValue("path", path)
.setValue("monitor", monitor);
return new NumassStorage(Global.instance(), meta, path);
} }
@Override @Override
@@ -37,8 +42,32 @@ public class NumassStorageFactory implements StorageType {
return "numass"; return "numass";
} }
@NotNull
@Override @Override
public Storage build(Context context, Meta meta) { public Storage build(Context context, Meta meta) {
return new NumassStorage(context, meta); if (meta.hasValue("path")) {
URI uri = URI.create(meta.getString("path"));
Path path;
if (uri.getScheme().startsWith("ssh")) {
try {
String username = meta.getString("userName", uri.getUserInfo());
//String host = meta.getString("host", uri.getHost());
int port = meta.getInt("port", 22);
SFTPEnvironment env = new SFTPEnvironment()
.withUsername(username)
.withPassword(meta.getString("password","").toCharArray());
FileSystem fs = FileSystems.newFileSystem(uri, env,context.getClassLoader());
path = fs.getPath(uri.getPath());
} catch (Exception e) {
throw new RuntimeException(e);
}
} else {
path = Paths.get(uri);
}
return new NumassStorage(context, meta, path);
} else {
context.getLogger().warn("A storage path not provided. Creating default root storage in the working directory");
return new NumassStorage(context, meta, context.getIo().getWorkDirectory());
}
} }
} }

View File

@@ -0,0 +1,17 @@
package inr.numass.scripts.temp
import hep.dataforge.context.Context
import hep.dataforge.grind.Grind
import hep.dataforge.grind.GrindShell
import hep.dataforge.storage.api.Storage
import hep.dataforge.storage.commons.StorageManager
new GrindShell().eval {
def ctx = context as Context;
//(LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME) as Logger).setLevel(Level.INFO)
def storageMeta = Grind.buildMeta(type: "numass", path: "sftp://192.168.111.1/home/trdat/data/2017_11", userName: "trdat", password: "Anomaly")
Storage storage = ctx.loadFeature("hep.dataforge:storage", StorageManager).buildStorage(storageMeta);
}

View File

@@ -103,7 +103,7 @@ val analyzeTask = task("analyze") {
} }
pipe<NumassSet, Table> { set -> pipe<NumassSet, Table> { set ->
SmartAnalyzer().analyzeSet(set, meta).also { res -> SmartAnalyzer().analyzeSet(set, meta).also { res ->
context.getIo().out("numass.analyze", name).use { context.io.out("numass.analyze", name).use {
NumassUtils.write(it, meta, res) NumassUtils.write(it, meta, res)
} }
} }