Compare commits: dev-zeleny ... master (348 commits)

SHA1 | Author | Date |
---|---|---|---|
c754dc3471 | |||
8f7d754301 | |||
c923c3e7d3 | |||
99b2d941c8 | |||
191af77f57 | |||
79759c5256 | |||
2eb965e563 | |||
1b29e377ca | |||
2634a19285 | |||
261c415d3d | |||
e52d509c2b | |||
706521a6b6 | |||
94000689da | |||
851fdda311 | |||
cbbcd18df3 | |||
dc2bf5da83 | |||
f732b85cc5 | |||
259b882e63 | |||
526f230300 | |||
7fca5db390 |||
be160ba98a |||
182f206b88 |||
3806f97c77 | |||
d5ebef404f | |||
3644533043 | |||
ee5afcdafe | |||
de476fb273 | |||
a136db16ff | |||
a699c36f8e | |||
2aba1b48dc | |||
cfa20eedba | |||
f78f0f814f |||
4e7ead0763 | |||
4543648cda | |||
b6949310ea | |||
2c2f33427a | |||
29fa30fb51 | |||
f3afb5e9fe | |||
61c8df9eb0 | |||
707b59e6fc | |||
add400b324 | |||
58c5355e25 | |||
f83b759e75 | |||
7d88f828d7 | |||
5d7ddb4e00 | |||
82838b6a92 | |||
e41fdfc086 | |||
4117a05df4 | |||
5406a6a64c | |||
0cc4dc0db7 | |||
233639f0b6 | |||
70bd92f019 | |||
f8eea45ed0 | |||
e14c0a695e | |||
3c1fe23366 | |||
81e2ad06cc | |||
6ca76cff17 | |||
9a24e1e392 | |||
9cceb44a90 | |||
0b68c1edae | |||
b8869570ce | |||
4833128857 | |||
6bd8a7acbc | |||
f5d32ba511 | |||
0fc2198832 | |||
a546552540 | |||
fe92e8fccf | |||
7d9189e15c | |||
0622bacc4d | |||
bedab0dc86 | |||
f0820a3bed | |||
665f317e4e | |||
82d37f4b55 | |||
6d396368b7 | |||
77857289f0 | |||
eaa9d40d60 | |||
6b41163ed3 | |||
e5000171f1 | |||
3c6bc15716 | |||
11143e4ba1 | |||
91621864c2 | |||
1e97165328 | |||
be8e971436 |||
9cc30b1f4e |||
7414e60192 |||
8c0bc05a9a |||
c480cd8e4d | |||
64e0c554cc | |||
532e0c253b | |||
c423dc214e | |||
d178c4ff0d | |||
387ab8747e |||
3f54eee578 | |||
aded38254e | |||
00d964eef3 | |||
b07d281a83 | |||
81cdd38c40 | |||
0ad6852e36 | |||
a71bb732da | |||
acfe9c2f74 |||
ce8074c104 | |||
922a3b07ee | |||
81abbe28a9 | |||
da9d6e7639 | |||
66c708d9fb | |||
c1065c2885 | |||
e5f422f9ca | |||
c01bc36d41 | |||
be2daca25e | |||
b968d735ce | |||
67554a8c98 | |||
24187722e4 | |||
d3c129526d | |||
5632487dca | |||
e432b07201 | |||
28a6914747 | |||
90a92c4121 | |||
b404615145 | |||
7aec2f3547 | |||
3ba5a9076b | |||
8763d63e28 | |||
4a76063093 | |||
5fbbac465a | |||
b387b21554 | |||
c8bd3390cb | |||
a3479e74f7 | |||
9f5b010847 | |||
679175391a | |||
14455c2b2b | |||
a9cec666a3 | |||
d2ea1a975e | |||
73b3bbe7fc | |||
474597777c | |||
9d3c7149b7 | |||
bc9cd3b5a8 | |||
254163bdef | |||
3bdaf332cb |||
6e769d1089 |||
e62ff61814 |||
1037c45c0d | |||
82b328f797 | |||
a6f1e54255 | |||
3888b2d9e7 | |||
0acb6ec448 | |||
b616e3ad6d |||
d1381cc98c |||
543023d2df |||
d769b0d389 |||
3fc698dd09 |||
32b986fc47 |||
3e8421187f | |||
5afe0523f1 | |||
a7ecbfb763 | |||
2352f1cff1 | |||
1a983665f8 | |||
53393e7958 | |||
874a253292 | |||
187094d942 | |||
23f1d4f7fd | |||
f1f5f7a70c | |||
9fbc482549 | |||
b86c6141cd |||
ddd2bd99be |||
e13e3ab6bf |||
bdef0d9185 | |||
6bdcd0f37d | |||
18bfde9978 | |||
64100dcfe4 | |||
3f0d088c48 | |||
b6dad141f8 | |||
f3d43cd40a | |||
c98ffd1eb4 | |||
488cd5a939 | |||
cf0c934acf | |||
a7ee2f5922 |||
a726307641 | |||
db4ed02f9d | |||
2b945d4a78 | |||
f6210fde7f |||
5d02520904 | |||
7d3df24568 | |||
1970243785 | |||
03337f00f0 | |||
81fb064d38 | |||
730ac69544 | |||
fcd99b1ca8 | |||
6a0bfae931 | |||
11ba116a89 | |||
4d19d97c53 | |||
e66ae408cd | |||
0e053ab78d | |||
66355793ee | |||
e88178ffe7 | |||
25cba0c6de | |||
2291072e26 | |||
80d3a64cdf | |||
9dacd98f64 | |||
366d32a04a | |||
ac8631e3a0 | |||
7e4d1af55f | |||
3334380693 | |||
b916a038f7 | |||
23fae9794f | |||
9ed4245d84 | |||
13c0d189bb | |||
8b1d5eb69e | |||
030f3ed6fe | |||
027d5ed923 | |||
f946777f40 | |||
7a9f7da7f2 | |||
81375d4644 | |||
9c3f9420ea | |||
adf65a5bde | |||
460dc77d51 | |||
76968f07e5 | |||
6389a25aaf | |||
1c89543d73 | |||
397a19fb32 | |||
e931994b75 | |||
617ed13efa | |||
89f0d627b8 | |||
702589f7b3 | |||
1f773cc230 | |||
95e6925d55 | |||
62e08a1b75 | |||
18407424fe | |||
4c5f0e9e43 | |||
7c4d69ec1b | |||
e317b67a48 | |||
2ba4121a36 | |||
eb16294a7e | |||
17c9bf3d54 | |||
221cc65b78 | |||
2098d96561 | |||
4c98d62e8f | |||
47d49d1e0e | |||
1b46d00a91 | |||
6912f26291 |||
2315fb963b | |||
3a6aef95b1 | |||
167b73df85 | |||
2eb5fe03cd | |||
0a77f729ec | |||
920366388d | |||
1284aa9f2e | |||
d68fbfd8a5 | |||
3ab3f2766c | |||
d5992c721d | |||
8ef0b2247c | |||
c0348dd06f | |||
d6ed5f94e4 | |||
ccb11b52c8 |||
01c209d1d6 | |||
acfb070938 | |||
810f79b9ed | |||
44ef4ad97f | |||
bc4456637c | |||
4ab9751bea | |||
b229de3eb7 | |||
7658839acf | |||
d170f5d60c | |||
fed787331d | |||
606faa5e1b | |||
8a8484172c | |||
52a3c8bc6f | |||
87af89b47d | |||
eeb4643d93 | |||
a767f279a3 | |||
b7d01bc40f | |||
99fee604d8 | |||
6ad5f162a1 | |||
57b263ec63 | |||
b8d775aa30 | |||
617612bf67 | |||
b5b0a6898e | |||
34110f6be1 | |||
b72f73f75b | |||
4fde6c4a48 | |||
c3d4836a11 | |||
8529c725e7 | |||
d951668911 |||
fcba27cd72 | |||
c280671e61 | |||
faeb737672 | |||
87326d05c7 | |||
059ade7973 | |||
73fbe19fed | |||
8fc332d42e | |||
f5b2b4c9e4 |||
9d54358307 | |||
086fcd441b | |||
b5967902cb |||
fc0454d339 | |||
90a241ac75 |||
fb559f4562 | |||
453a1bc755 | |||
5e8e3014f7 | |||
aa52c45c5a | |||
fb28c854b3 | |||
7c68fc6e85 | |||
0434360d46 | |||
f67fb63c45 | |||
572e5eea60 |||
b0950f8415 | |||
6450696157 | |||
eebfe534cc | |||
7efa19920b | |||
56e7d55450 | |||
01711c2b8c | |||
7f0ad68d9d | |||
66cee2d42b | |||
5c3d51de58 | |||
e0aab57953 | |||
417c292507 | |||
7dde03ee68 |||
e835d81183 | |||
bc8878da48 | |||
b78fc2e7d7 | |||
2c52e9aaa3 | |||
4ab71a79db | |||
db03dfaae9 | |||
e2845f4efc | |||
93c806c3bf | |||
a2e8d4f018 | |||
8dfd56f02e | |||
dfea68b65c | |||
cd4f07267b | |||
126e59f81a | |||
061c570407 | |||
819ab3ab20 | |||
f906fbdb0a | |||
fe6760eee6 | |||
b83821af51 | |||
736ec621b0 | |||
1b879eccc7 | |||
e532e8358e | |||
ce8be78549 | |||
659fded3a5 | |||
f9ae9348e2 | |||
effac131de | |||
be2bca24b1 | |||
4eb07949b4 | |||
0ec42689d7 | |||
c789dabdae | |||
ab47d7723e | |||
ad2f5681b6 | |||
76e7f47528 | |||
2e643287ef |||
41  .github/workflows/build.yml  (vendored, new file)
@@ -0,0 +1,41 @@
name: Gradle build

on:
  push:
    branches: [ dev, master ]
  pull_request:

jobs:
  build:
    strategy:
      matrix:
        os: [ macOS-latest, windows-latest ]
    runs-on: ${{matrix.os}}
    timeout-minutes: 40
    steps:
      - name: Checkout the repo
        uses: actions/checkout@v2
      - name: Set up JDK 11
        uses: DeLaGuardo/setup-graalvm@4.0
        with:
          graalvm: 21.2.0
          java: java11
          arch: amd64
      - name: Cache gradle
        uses: actions/cache@v2
        with:
          path: |
            ~/.gradle/caches
            ~/.gradle/wrapper
          key: ${{ runner.os }}-gradle-${{ hashFiles('*.gradle.kts') }}
          restore-keys: |
            ${{ runner.os }}-gradle-
      - name: Cache konan
        uses: actions/cache@v2
        with:
          path: ~/.konan
          key: ${{ runner.os }}-gradle-${{ hashFiles('*.gradle.kts') }}
          restore-keys: |
            ${{ runner.os }}-gradle-
      - name: Build
        run: ./gradlew build --build-cache --no-daemon --stacktrace
17  .github/workflows/gradle.yml  (vendored)
@@ -1,17 +0,0 @@
name: Gradle build

on: [push]

jobs:
  build:

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v1
      - name: Set up JDK 11
        uses: actions/setup-java@v1
        with:
          java-version: 11
      - name: Build with Gradle
        run: ./gradlew build
31  .github/workflows/pages.yml  (vendored, new file)
@@ -0,0 +1,31 @@
name: Dokka publication

on:
  workflow_dispatch:
  release:
    types: [ created ]

jobs:
  build:
    runs-on: ubuntu-latest
    timeout-minutes: 40
    steps:
      - uses: actions/checkout@v3.0.0
      - uses: actions/setup-java@v3.0.0
        with:
          java-version: 11
          distribution: liberica
      - name: Cache konan
        uses: actions/cache@v3.0.1
        with:
          path: ~/.konan
          key: ${{ runner.os }}-gradle-${{ hashFiles('*.gradle.kts') }}
          restore-keys: |
            ${{ runner.os }}-gradle-
      - uses: gradle/gradle-build-action@v2.4.2
        with:
          arguments: dokkaHtmlMultiModule --no-parallel
      - uses: JamesIves/github-pages-deploy-action@v4.3.0
        with:
          branch: gh-pages
          folder: build/dokka/htmlMultiModule
50  .github/workflows/publish.yml  (vendored, new file)
@@ -0,0 +1,50 @@
name: Gradle publish

on:
  workflow_dispatch:
  release:
    types: [ created ]

jobs:
  publish:
    environment:
      name: publish
    strategy:
      matrix:
        os: [ macOS-latest, windows-latest ]
    runs-on: ${{matrix.os}}
    steps:
      - uses: actions/checkout@v3.0.0
      - uses: actions/setup-java@v3.10.0
        with:
          java-version: 11
          distribution: liberica
      - name: Cache konan
        uses: actions/cache@v3.0.1
        with:
          path: ~/.konan
          key: ${{ runner.os }}-gradle-${{ hashFiles('*.gradle.kts') }}
          restore-keys: |
            ${{ runner.os }}-gradle-
      - name: Publish Windows Artifacts
        if: matrix.os == 'windows-latest'
        uses: gradle/gradle-build-action@v2.4.2
        with:
          arguments: |
            publishAllPublicationsToSpaceRepository
            -Ppublishing.targets=all
            -Ppublishing.space.user=${{ secrets.SPACE_APP_ID }}
            -Ppublishing.space.token=${{ secrets.SPACE_APP_SECRET }}
      - name: Publish Mac Artifacts
        if: matrix.os == 'macOS-latest'
        uses: gradle/gradle-build-action@v2.4.2
        with:
          arguments: |
            publishMacosX64PublicationToSpaceRepository
            publishMacosArm64PublicationToSpaceRepository
            publishIosX64PublicationToSpaceRepository
            publishIosArm64PublicationToSpaceRepository
            publishIosSimulatorArm64PublicationToSpaceRepository
            -Ppublishing.targets=all
            -Ppublishing.space.user=${{ secrets.SPACE_APP_ID }}
            -Ppublishing.space.token=${{ secrets.SPACE_APP_SECRET }}
3  .gitignore  (vendored)
@@ -6,5 +6,4 @@ out/
build/

!gradle-wrapper.jar
gradle.properties
!gradle-wrapper.jar
217  CHANGELOG.md  (new file)
@@ -0,0 +1,217 @@
# Changelog

## Unreleased

### Added

### Changed

### Deprecated

### Removed

### Fixed

### Security

## 0.7.0 - 2023-11-26

### Added

- Obligatory `type: KType` and `descriptor` property for `MetaConverters`
- Added separate `Meta`, `SealedMeta` and `ObservableMutableMeta` builders (see the sketch below).
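A minimal sketch of the separate builders mentioned above (not part of the diff); the `put` child/value syntax is assumed from dataforge-meta conventions:

```kotlin
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.ObservableMutableMeta

// Immutable tree built with the plain Meta builder.
val config: Meta = Meta {
    "beam" put {
        "energy" put 14.0
    }
}

// Observable variant added in 0.7.0: mutations after construction notify listeners.
val state = ObservableMutableMeta {
    "status" put "idle"
}
```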
### Changed

- Meta converter `metaToObject` returns a non-nullable type. Additional method `metaToObjectOrNull` for nullable return.
- Kotlin 1.9.20.
- Migrated from ktor-io to kotlinx-io.
- `MutableMeta` builder now returns a simplified version of meta that does not hold listeners.
- More concise names for read/write methods in IO.
- Remove unnecessary confusion with `get`/`getMeta` by removing `getMeta` from the interface.

### Deprecated

- `String.parseValue` is replaced with `Value.parse` (see the sketch below).
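A migration sketch for the deprecation above, using only the names from the entry (the receiver and return types are assumed, not shown in this diff):

```kotlin
import space.kscience.dataforge.meta.*

val oldStyle = "42.0".parseValue() // deprecated String extension
val newStyle = Value.parse("42.0") // replacement entry point
```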
### Fixed

- Memory leak in SealedMeta builder

## 0.6.2 - 2023-07-29

### Changed

- Meta to JSON serializer now serializes a single item with an index as an array. It is important for plotly integration.
- Meta to JSON serializes a Meta that has a value but no children as a literal or array instead of an object with a `@value` field (see the sketch below).
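An illustration of the 0.6.2 behaviour described above; the `toJson` extension name and the exact output shapes are assumptions rather than part of this diff:

```kotlin
import space.kscience.dataforge.meta.*

val meta = Meta {
    "point[0]" put 1.0
    "point[1]" put 2.0
}

// Indexed children of one name now serialize as a JSON array, e.g. {"point":[1.0,2.0]},
// and a node that carries only a value serializes as a bare literal instead of {"@value": ...}.
println(meta.toJson())
```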
## 0.6.1 - 2023-03-31

### Added

- File cache for workspace
- Smart task metadata transformation for workspace
- Add `readOnly` property to descriptors
- Add `specOrNull` delegate to meta and Scheme
- Suspended read methods to the `Binary`
- Synchronously accessed `meta` to all `DataSet`s
- More fine-grained types in Action builders.

### Changed

- `Name::replaceLast` API
- `PluginFactory` no longer requires plugin class
- Collection<Named> toMap -> associateByName
- Simplified `DFTL` envelope format. Closing symbols are unnecessary. Properties are discontinued.
- Meta `get` method allows nullable receiver
- `withDefault` functions do not add new keys to meta children and are consistent.
- `dataforge.meta.values` package is merged into `dataforge.meta` for better star imports
- Kotlin 1.8.20
- `Factory` is now `fun interface` and uses `build` instead of `invoke`. `invoke` moved to an extension (see the sketch after this list).
- KTor 2.0
- DataTree `items` call is blocking.
- DataSet `getData` is no longer suspended and renamed to `get`
- DataSet operates with sequences of data instead of flows
- PartialEnvelope uses `Int` instead of `UInt`.
- `ActiveDataSet` renamed to `DataSource`
- `selectOne`->`getByType`
- Data traversal in `DataSet` is done via iterator
- Remove all unnecessary properties for `IOFormat`
- Separate interfaces for `IOReader` and `IOWriter`
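A sketch of the `Factory` change referenced in the list above, based on the `dataforge-context` API dump later in this comparison; `EchoService` is a hypothetical user class:

```kotlin
import space.kscience.dataforge.context.Factory
import space.kscience.dataforge.context.Global
import space.kscience.dataforge.context.invoke
import space.kscience.dataforge.meta.Meta

class EchoService(val meta: Meta) // hypothetical product type

// `Factory` is a fun interface with a single `build(context, meta)` member,
// so a SAM lambda is enough to implement it.
val echoFactory = Factory { _, meta -> EchoService(meta) }

val direct = echoFactory.build(Global, Meta.EMPTY)  // the interface method
val legacy = echoFactory.invoke(Meta.EMPTY, Global) // the old call style, now an extension
```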
### Deprecated

- Context.fetch -> Context.request

### Fixed

- `readDataDirectory` does not split names with dots
- Front matter reader does not crash on non-UTF files
- Meta file name in readMeta from directory
- Tagless and FrontMatter envelope partial readers fix.

## 0.5.2

### Added

- Yaml plugin
- Partial fix to #53

### Fixed

- MutableMetaImpl attachment and checks
- Listeners in observable meta are replaced by lists
- JS number comparison bug.

## 0.5.0

### Added

- Experimental `listOfSpec` delegate.

### Changed

- **API breaking** Config is deprecated, use `ObservableMeta` instead.
- **API breaking** Descriptor now has a member property `defaultValue` instead of `defaultItem()` extension. It caches the default value state on the first call. It is done because computing the default on each call is too expensive.
- Kotlin 1.5.10
- Build tools 0.10.0
- Relaxed type restriction on `MetaConverter`. Now nullables are available.
- **Huge API-breaking refactoring of Meta**. Meta now can have both value and children. There is only one kind of descriptor now.
- **API breaking** `String.toName()` is replaced by `Name.parse()`
- **API breaking** Configurable `config` changed to `meta`

### Removed

- `Config`
- Public PluginManager mutability
- Tables and tables-exposed moved to the separate project `tables.kt`
- BinaryMetaFormat. Use CBOR encoding instead

### Fixed

- Proper json array index treatment.
- Proper json index for single-value array.

## 0.4.0

### Added

- LogManager plugin
- dataforge-context API dependency on SLF4j
- Context `withEnv` and `fetch` methods to manipulate plugins without changing plugins after creation.
- Split `ItemDescriptor` into builder and read-only part

### Changed

- Kotlin-logging moved from common to JVM and JS. Replaced by console for native.
- Package changed to `space.kscience`
- Scheme made observable
- Global context is a variable (the singleton is hidden and will be deprecated in future)
- Kotlin 1.5
- Added blank builders for child contexts.
- Refactor loggers

### Deprecated

- Direct use of PluginManager

### Removed

- Common dependency on Kotlin-logging
- Kotlinx-io fork dependency. Replaced by Ktor-io.

### Fixed

- Scheme properties properly handle children property change.

## 0.3.0

### Added

- Yaml meta format based on yaml.kt
- `Path` builders
- Special ValueType for lists
- `copy` method to descriptors
- Multiplatform yaml meta

### Changed

- `ListValue` and `DoubleArrayValue` implement `Iterable`.
- Changed the logic of `Value::isList` to check for type instead of size
- `Meta{}` builder made inline
- Moved `Envelope` builder to a top level function. Companion invoke is deprecated.
- Context logging moved to the extension
- `number` and `string` methods on `Value` moved to extensions (breaking change)
- \[Major breaking change\] Schemes and configurables use `MutableItemProvider` instead of `Config`
- \[Major breaking change\] `MetaItem` renamed to `TypedMetaItem` and `MetaItem` is now an alias for `TypedMetaItem<*>`
- \[Major breaking change\] Moved `NodeItem` and `ValueItem` to a top level
- Plugins are removed from Context constructor and added lazily in ContextBuilder
- \[Major breaking change\] Full refactor of DataTree/DataSource
- \[Major Breaking change\] Replace KClass with KType in data. Remove direct access to constructors with types.

## 0.2.0

### Changed

- Context content resolution refactor
- Kotlin 1.4.10 (build tools 0.6.0)
- Empty query in Name is null instead of ""
- Provider provides an empty map instead of error by default
- Hidden delegates hierarchy in favor of stdlib properties
- Removed io dependency from `dataforge-output`. Replaced Output by Appendable.
- Configurable is no longer MutableItemProvider. All functionality moved to Scheme.

### Deprecated

- Context activation API
- TextRenderer

### Removed

- Functional server prototype
- `dataforge-output` module

### Fixed

- Global context CoroutineScope resolution
- Library mode compliance
201  LICENSE  (new file)
@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.

"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).

"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:

(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and

(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and

(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and

(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.

You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright [yyyy] [name of copyright owner]

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
112  README.md
@@ -1,96 +1,38 @@
[![JetBrains Research](https://jb.gg/badges/research.svg)](https://confluence.jetbrains.com/display/ALL/JetBrains+on+GitHub)
[![DOI](https://zenodo.org/badge/148831678.svg)](https://zenodo.org/badge/latestdoi/148831678)

# Questions and Answers #

In this section we will try to cover DataForge main ideas in the form of questions and answers.

## General ##

**Q:** I have a lot of data to analyze. The analysis process is complicated, requires a lot of stages, and the data flow is not always obvious. To top it off, the data size is huge, so I don't want to perform operations I don't need (calculate something I won't need or calculate something twice). And yes, I need it to be performed in parallel and probably on a remote computer. By the way, I am sick and tired of scripts that modify other scripts that control scripts. Could you help me?

**A:** Yes, that is precisely the problem DataForge was made to solve. It allows automated data manipulations with automatic optimization and parallelization. The important thing is that data processing recipes are written declaratively, so it is quite easy to perform computations on a remote station. DataForge also guarantees reproducibility of analysis results.
<hr>

**Q:** How does it work?

**A:** At the core of DataForge lies the idea of a **metadata processor**. It builds on the observation that in order to analyze something you need the data itself and some additional information about what that data represents and what the user wants as a result. This additional information is called metadata and can be organized in a regular structure (a tree of values not unlike XML or JSON). The important thing is that this distinction leaves no place for user instructions (or scripts). Indeed, the idea behind DataForge is that one does not need imperative commands. The framework configures itself according to the input metadata and decides what operations should be performed in the most efficient way.
<hr>

**Q:** But where does it get the algorithms to use?

**A:** Of course algorithms must be written somewhere. No magic here. The logic is written in specialized modules. Some modules are provided out of the box at the system core, some need to be developed for a specific problem.
<hr>

**Q:** So I still need to write the code? What is the difference then?

**A:** Yes, someone still needs to write the code. But not necessarily you. Simple operations can be performed using the provided core logic. Your group can also have one programmer writing the logic and all others using it without any real programming expertise. The framework is organized in such a way that when one writes some additional logic, one does not need to think about complicated things like parallel computing, resource handling, logging, caching, etc. Most of that is done by DataForge.
<hr>

## Platform ##

**Q:** Which platform does DataForge use? Which operating systems does it work on?

**A:** DataForge is mostly written in Java and uses the JVM as a platform. It works on any system that supports the JVM (meaning almost any modern system, excluding some mobile platforms).
<hr>

**Q:** But Java... it is slow!

**A:** [It is not](https://stackoverflow.com/questions/2163411/is-java-really-slow/2163570#2163570). It lacks some hardware-specific optimizations and requires some additional time to start (due to its JIT nature), but otherwise it is at least as fast as other languages traditionally used in science. More importantly, the memory safety, tooling support and vast ecosystem make it the №1 candidate for a data analysis framework.

<hr>

**Q:** Can I use my C++/Fortran/Python code in DataForge?

**A:** Yes, as long as the code can be called from Java. Most common languages have a bridge for Java access. There are no problems at all with compiled C/Fortran libraries. Python code can be called via one of the existing Python-Java interfaces. It is also planned to implement remote method invocation for common languages, so your Python, or, say, Julia, code could run in its native environment. The metadata processor paradigm makes it much easier to do so.

<hr>

## Features ##

**Q:** What other features does DataForge provide?

**A:** Alongside metadata processing (and a lot of tools for metadata manipulation and layering), DataForge has two additional important concepts:

* **Modularisation**. Contrary to a lot of other frameworks, DataForge is intrinsically modular. The mandatory part is a rather tiny core module. Everything else can be customized.

* **Context encapsulation**. Every DataForge task is executed in some context. The context isolates the environment for the task, works as a dependency-injection base, and specifies how the task interacts with the external world.
![Gradle build](https://github.com/mipt-npm/dataforge-core/workflows/Gradle%20build/badge.svg)


<hr>
### [dataforge-context](dataforge-context)
> Context and provider definitions
>
> **Maturity**: DEVELOPMENT

**Q:** OK, but now I want to work directly with my measuring devices. How can I do that?
### [dataforge-data](dataforge-data)
>
> **Maturity**: EXPERIMENTAL

**A:** The [dataforge-control](${site.url}/docs.html#control) module provides interfaces to interact with the hardware. Out of the box it supports safe communication with TCP/IP or COM/tty based devices. Specific device declarations can be added via additional modules. It is also possible to maintain data storage with the [datforge-storage](${site.url}/docs.htm#storage) module.
### [dataforge-io](dataforge-io)
> IO module
>
> **Maturity**: EXPERIMENTAL

<hr>
### [dataforge-meta](dataforge-meta)
> Meta definition and basic operations on meta
>
> **Maturity**: DEVELOPMENT

**Q:** Declarations and metadata are good, but I want my scripts back!
### [dataforge-scripting](dataforge-scripting)
>
> **Maturity**: PROTOTYPE

**A:** We can do that. [GRIND](${site.url}/docs.html#grind) provides a shell-like environment called GrindShell. It allows running imperative scripts with full access to all of the DataForge functionality. Grind scripts are basically context-encapsulated. There are also convenient feature wrappers called helpers that can be loaded into the shell when new feature modules are added.
### [dataforge-workspace](dataforge-workspace)
>
> **Maturity**: EXPERIMENTAL

<hr>
### [dataforge-io/dataforge-io-yaml](dataforge-io/dataforge-io-yaml)
> YAML meta converters and Front Matter envelope format
>
> **Maturity**: PROTOTYPE

## Misc ##

**Q:** So everything looks great, can I replace my ROOT / other data analysis framework with DataForge?

**A:** One must note that DataForge is made for analysis, not for visualisation. The visualisation and user-interaction capabilities of DataForge are rather limited compared to frameworks like ROOT, JAS3 or DataMelt. The idea is to provide a reliable API and core functionality. In fact, JAS3 and DataMelt could be used as a frontend for DataForge mechanics. It is planned to add an interface to ROOT via JFreeHep AIDA.

<hr>

**Q:** How does DataForge compare to cluster computation frameworks like Hadoop or Spark?

**A:** Again, it is not the purpose of DataForge to replace cluster software. DataForge has some internal parallelism mechanics and implementations, but they are most certainly worse than specially developed programs. Still, DataForge is not fixed on one single implementation. Your favourite parallel processing tool can still be used as a back-end for DataForge, with the full benefit of configuration tools and integrations and no performance overhead.

<hr>

**Q:** Is it possible to use DataForge in notebook mode?

**A:** Yes, it is. DataForge can be used as-is from the [beaker/beakerx](http://beakernotebook.com/) Groovy kernel with minor additional adjustments. It is planned to provide a separate DataForge kernel for `beakerx` which will automatically call a specific GRIND shell.

<hr>

**Q:** Can I use DataForge on a mobile platform?

**A:** DataForge is modular. The core and most of the API are pretty compact, so they can be used in Android applications. Some modules are designed for PC and cannot be used on other platforms. iPhone does not support Java and therefore can only use client-side DataForge applications.
build.gradle.kts
@@ -1,24 +1,39 @@
import scientifik.ScientifikExtension
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
import space.kscience.gradle.useApache2Licence
import space.kscience.gradle.useSPCTeam

plugins {
    id("scientifik.mpp") version "0.2.4" apply false
    id("scientifik.jvm") version "0.2.4" apply false
    id("scientifik.publish") version "0.2.4" apply false
    id("space.kscience.gradle.project")
}

val dataforgeVersion by extra("0.1.5-dev-3")

val bintrayRepo by extra("dataforge")
val githubProject by extra("dataforge-core")

allprojects {
    group = "hep.dataforge"
    version = dataforgeVersion
    group = "space.kscience"
    version = "0.7.0"
}

subprojects {
    apply(plugin = "scientifik.publish")
    afterEvaluate {
        extensions.findByType<ScientifikExtension>()?.apply { withDokka() }
    apply(plugin = "maven-publish")

    tasks.withType<KotlinCompile> {
        kotlinOptions {
            freeCompilerArgs = freeCompilerArgs + "-Xcontext-receivers"
        }
    }
}

readme {
    readmeTemplate = file("docs/templates/README-TEMPLATE.md")
}

ksciencePublish {
    pom("https://github.com/SciProgCentre/kmath") {
        useApache2Licence()
        useSPCTeam()
    }
    repository("spc","https://maven.sciprog.center/kscience")
    sonatype()
}

apiValidation {
    nonPublicMarkers.add("space.kscience.dataforge.misc.DFExperimental")
}
23  dataforge-context/README.md  (new file)
@@ -0,0 +1,23 @@
# Module dataforge-context

Context and provider definitions

## Usage
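The Usage section is empty in this revision of the file; below is a minimal sketch (not part of the diff) of creating a context and inspecting it, using only declarations from the `dataforge-context.api` dump shown further down:

```kotlin
import space.kscience.dataforge.context.Context
import space.kscience.dataforge.context.info
import space.kscience.dataforge.context.logger

// Build a named context; plugin(...) and properties { ... } calls would go inside the block.
val context = Context("example") {
}

fun report() {
    // `logger` and `info` are extension helpers from dataforge-context.
    context.logger.info { "Attached plugins: ${context.plugins.list(false).size}" }
}
```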
## Artifact:

The Maven coordinates of this project are `space.kscience:dataforge-context:0.7.0`.

**Gradle Kotlin DSL:**
```kotlin
repositories {
    maven("https://repo.kotlin.link")
    //uncomment to access development builds
    //maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
    mavenCentral()
}

dependencies {
    implementation("space.kscience:dataforge-context:0.7.0")
}
```
327  dataforge-context/api/dataforge-context.api  (new file)
@@ -0,0 +1,327 @@
public abstract class space/kscience/dataforge/context/AbstractPlugin : space/kscience/dataforge/context/Plugin {
    public fun <init> ()V
    public fun <init> (Lspace/kscience/dataforge/meta/Meta;)V
    public synthetic fun <init> (Lspace/kscience/dataforge/meta/Meta;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
    public fun attach (Lspace/kscience/dataforge/context/Context;)V
    public fun dependsOn ()Ljava/util/Map;
    public fun detach ()V
    public fun getContext ()Lspace/kscience/dataforge/context/Context;
    public fun getMeta ()Lspace/kscience/dataforge/meta/Meta;
    public fun isAttached ()Z
    protected final fun require (Lspace/kscience/dataforge/context/PluginFactory;Lkotlin/reflect/KClass;Lspace/kscience/dataforge/meta/Meta;)Lkotlin/properties/ReadOnlyProperty;
    public static synthetic fun require$default (Lspace/kscience/dataforge/context/AbstractPlugin;Lspace/kscience/dataforge/context/PluginFactory;Lkotlin/reflect/KClass;Lspace/kscience/dataforge/meta/Meta;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
}

public final class space/kscience/dataforge/context/AbstractPluginKt {
    public static final fun associateByName (Ljava/util/Collection;)Ljava/util/Map;
}

public final class space/kscience/dataforge/context/ClassLoaderPlugin : space/kscience/dataforge/context/AbstractPlugin {
    public static final field Companion Lspace/kscience/dataforge/context/ClassLoaderPlugin$Companion;
    public fun <init> (Ljava/lang/ClassLoader;)V
    public fun getTag ()Lspace/kscience/dataforge/context/PluginTag;
    public final fun services (Lkotlin/reflect/KClass;)Lkotlin/sequences/Sequence;
}

public final class space/kscience/dataforge/context/ClassLoaderPlugin$Companion {
    public final fun getDEFAULT ()Lspace/kscience/dataforge/context/ClassLoaderPlugin;
}

public final class space/kscience/dataforge/context/ClassLoaderPluginKt {
    public static final fun getClassLoaderPlugin (Lspace/kscience/dataforge/context/Context;)Lspace/kscience/dataforge/context/ClassLoaderPlugin;
}

public class space/kscience/dataforge/context/Context : kotlinx/coroutines/CoroutineScope, space/kscience/dataforge/meta/MetaRepr, space/kscience/dataforge/misc/Named, space/kscience/dataforge/provider/Provider {
    public static final field Companion Lspace/kscience/dataforge/context/Context$Companion;
    public static final field PROPERTY_TARGET Ljava/lang/String;
    public final fun buildContext (Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/context/Context;
    public static synthetic fun buildContext$default (Lspace/kscience/dataforge/context/Context;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/context/Context;
    public fun close ()V
    public fun content (Ljava/lang/String;)Ljava/util/Map;
    public final fun content (Ljava/lang/String;Z)Ljava/util/Map;
    public fun getCoroutineContext ()Lkotlin/coroutines/CoroutineContext;
    public fun getDefaultTarget ()Ljava/lang/String;
    public final fun getName ()Lspace/kscience/dataforge/names/Name;
    public final fun getParent ()Lspace/kscience/dataforge/context/Context;
    public final fun getPlugins ()Lspace/kscience/dataforge/context/PluginManager;
    public final fun getProperties ()Lspace/kscience/dataforge/meta/Laminate;
    public fun toMeta ()Lspace/kscience/dataforge/meta/Meta;
}

public final class space/kscience/dataforge/context/Context$Companion {
}

public abstract interface class space/kscience/dataforge/context/ContextAware {
    public abstract fun getContext ()Lspace/kscience/dataforge/context/Context;
}

public final class space/kscience/dataforge/context/ContextBuilder {
    public final fun build ()Lspace/kscience/dataforge/context/Context;
    public final fun getName ()Lspace/kscience/dataforge/names/Name;
    public final fun plugin (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Lkotlin/jvm/functions/Function1;)V
    public final fun plugin (Lspace/kscience/dataforge/context/Plugin;)V
    public final fun plugin (Lspace/kscience/dataforge/context/PluginFactory;Lkotlin/jvm/functions/Function1;)V
    public final fun plugin (Lspace/kscience/dataforge/context/PluginFactory;Lspace/kscience/dataforge/meta/Meta;)V
    public final fun plugin (Lspace/kscience/dataforge/context/PluginTag;Lkotlin/jvm/functions/Function1;)V
    public static synthetic fun plugin$default (Lspace/kscience/dataforge/context/ContextBuilder;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
    public static synthetic fun plugin$default (Lspace/kscience/dataforge/context/ContextBuilder;Lspace/kscience/dataforge/context/PluginFactory;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
    public static synthetic fun plugin$default (Lspace/kscience/dataforge/context/ContextBuilder;Lspace/kscience/dataforge/context/PluginTag;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
    public final fun properties (Lkotlin/jvm/functions/Function1;)V
}

public final class space/kscience/dataforge/context/ContextBuilderKt {
}

public final class space/kscience/dataforge/context/DefaultLogManager : space/kscience/dataforge/context/AbstractPlugin, space/kscience/dataforge/context/LogManager {
    public static final field Companion Lspace/kscience/dataforge/context/DefaultLogManager$Companion;
    public fun <init> ()V
    public fun getDefaultLogger ()Lspace/kscience/dataforge/context/Logger;
    public fun getTag ()Lspace/kscience/dataforge/context/PluginTag;
    public fun logger (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/context/Logger;
}

public final class space/kscience/dataforge/context/DefaultLogManager$Companion : space/kscience/dataforge/context/PluginFactory {
    public synthetic fun build (Lspace/kscience/dataforge/context/Context;Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
    public fun build (Lspace/kscience/dataforge/context/Context;Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/context/DefaultLogManager;
    public fun getTag ()Lspace/kscience/dataforge/context/PluginTag;
}

public abstract interface class space/kscience/dataforge/context/Factory {
    public abstract fun build (Lspace/kscience/dataforge/context/Context;Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
}

public final class space/kscience/dataforge/context/FactoryKt {
    public static final fun invoke (Lspace/kscience/dataforge/context/Factory;Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/context/Context;)Ljava/lang/Object;
    public static synthetic fun invoke$default (Lspace/kscience/dataforge/context/Factory;Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/context/Context;ILjava/lang/Object;)Ljava/lang/Object;
}

public final class space/kscience/dataforge/context/GlobalKt {
    public static final fun Context (Ljava/lang/String;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/context/Context;
    public static synthetic fun Context$default (Ljava/lang/String;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/context/Context;
    public static final fun getGlobal ()Lspace/kscience/dataforge/context/Context;
}

public abstract interface class space/kscience/dataforge/context/LogManager : space/kscience/dataforge/context/Logger, space/kscience/dataforge/context/Plugin {
    public static final field Companion Lspace/kscience/dataforge/context/LogManager$Companion;
    public static final field DEBUG Ljava/lang/String;
    public static final field ERROR Ljava/lang/String;
    public static final field INFO Ljava/lang/String;
    public static final field TRACE Ljava/lang/String;
    public static final field WARNING Ljava/lang/String;
    public abstract fun getDefaultLogger ()Lspace/kscience/dataforge/context/Logger;
    public fun log (Ljava/lang/String;Lkotlin/jvm/functions/Function0;)V
    public fun log (Lspace/kscience/dataforge/names/Name;Ljava/lang/String;Lkotlin/jvm/functions/Function0;)V
    public abstract fun logger (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/context/Logger;
}

public final class space/kscience/dataforge/context/LogManager$Companion {
    public static final field DEBUG Ljava/lang/String;
    public static final field ERROR Ljava/lang/String;
    public static final field INFO Ljava/lang/String;
    public static final field TRACE Ljava/lang/String;
    public static final field WARNING Ljava/lang/String;
}

public final class space/kscience/dataforge/context/LogManagerKt {
    public static final fun debug (Lspace/kscience/dataforge/context/Logger;Lkotlin/jvm/functions/Function0;)V
    public static final fun error (Lspace/kscience/dataforge/context/Logger;Ljava/lang/Throwable;Lkotlin/jvm/functions/Function0;)V
    public static final fun error (Lspace/kscience/dataforge/context/Logger;Lkotlin/jvm/functions/Function0;)V
    public static final fun getLogger (Lspace/kscience/dataforge/context/Context;)Lspace/kscience/dataforge/context/LogManager;
    public static final fun getLogger (Lspace/kscience/dataforge/context/ContextAware;)Lspace/kscience/dataforge/context/Logger;
    public static final fun info (Lspace/kscience/dataforge/context/Logger;Lkotlin/jvm/functions/Function0;)V
    public static final fun trace (Lspace/kscience/dataforge/context/Logger;Lkotlin/jvm/functions/Function0;)V
    public static final fun warn (Lspace/kscience/dataforge/context/Logger;Lkotlin/jvm/functions/Function0;)V
}

public abstract interface class space/kscience/dataforge/context/Logger {
    public abstract fun log (Ljava/lang/String;Lkotlin/jvm/functions/Function0;)V
}

public abstract interface class space/kscience/dataforge/context/Plugin : space/kscience/dataforge/context/ContextAware, space/kscience/dataforge/meta/MetaRepr, space/kscience/dataforge/misc/Named, space/kscience/dataforge/provider/Provider {
    public static final field Companion Lspace/kscience/dataforge/context/Plugin$Companion;
    public static final field TARGET Ljava/lang/String;
    public abstract fun attach (Lspace/kscience/dataforge/context/Context;)V
    public abstract fun dependsOn ()Ljava/util/Map;
    public abstract fun detach ()V
    public abstract fun getMeta ()Lspace/kscience/dataforge/meta/Meta;
    public fun getName ()Lspace/kscience/dataforge/names/Name;
    public abstract fun getTag ()Lspace/kscience/dataforge/context/PluginTag;
    public abstract fun isAttached ()Z
    public fun toMeta ()Lspace/kscience/dataforge/meta/Meta;
}

public final class space/kscience/dataforge/context/Plugin$Companion {
    public static final field TARGET Ljava/lang/String;
}

public final class space/kscience/dataforge/context/PluginBuilder {
    public fun <init> (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V
    public synthetic fun <init> (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
    public final fun build ()Lspace/kscience/dataforge/context/PluginFactory;
    public final fun getTag ()Lspace/kscience/dataforge/context/PluginTag;
    public final fun provides (Ljava/lang/String;Ljava/util/Map;)V
    public final fun provides (Ljava/lang/String;[Lspace/kscience/dataforge/misc/Named;)V
    public final fun requires (Lspace/kscience/dataforge/context/PluginFactory;Lspace/kscience/dataforge/meta/Meta;)V
    public static synthetic fun requires$default (Lspace/kscience/dataforge/context/PluginBuilder;Lspace/kscience/dataforge/context/PluginFactory;Lspace/kscience/dataforge/meta/Meta;ILjava/lang/Object;)V
}

public final class space/kscience/dataforge/context/PluginBuilderKt {
    public static final fun PluginFactory (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/context/PluginFactory;
    public static synthetic fun PluginFactory$default (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/context/PluginFactory;
}

public abstract interface class space/kscience/dataforge/context/PluginFactory : space/kscience/dataforge/context/Factory {
    public static final field Companion Lspace/kscience/dataforge/context/PluginFactory$Companion;
    public static final field TYPE Ljava/lang/String;
    public abstract fun getTag ()Lspace/kscience/dataforge/context/PluginTag;
}

public final class space/kscience/dataforge/context/PluginFactory$Companion {
    public static final field TYPE Ljava/lang/String;
}

public final class space/kscience/dataforge/context/PluginManager : java/lang/Iterable, kotlin/jvm/internal/markers/KMappedMarker, space/kscience/dataforge/context/ContextAware {
    public final fun find (ZLkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/context/Plugin;
    public static synthetic fun find$default (Lspace/kscience/dataforge/context/PluginManager;ZLkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/context/Plugin;
    public final fun get (Lspace/kscience/dataforge/context/PluginTag;Z)Lspace/kscience/dataforge/context/Plugin;
    public static synthetic fun get$default (Lspace/kscience/dataforge/context/PluginManager;Lspace/kscience/dataforge/context/PluginTag;ZILjava/lang/Object;)Lspace/kscience/dataforge/context/Plugin;
    public final fun getByType (Lkotlin/reflect/KClass;Lspace/kscience/dataforge/context/PluginTag;Z)Ljava/lang/Object;
    public static synthetic fun getByType$default (Lspace/kscience/dataforge/context/PluginManager;Lkotlin/reflect/KClass;Lspace/kscience/dataforge/context/PluginTag;ZILjava/lang/Object;)Ljava/lang/Object;
    public fun getContext ()Lspace/kscience/dataforge/context/Context;
    public fun iterator ()Ljava/util/Iterator;
    public final fun list (Z)Ljava/util/Collection;
}

public final class space/kscience/dataforge/context/PluginTag : space/kscience/dataforge/meta/MetaRepr {
    public static final field Companion Lspace/kscience/dataforge/context/PluginTag$Companion;
    public static final field DATAFORGE_GROUP Ljava/lang/String;
    public fun <init> (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V
    public synthetic fun <init> (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
    public final fun component1 ()Ljava/lang/String;
    public final fun component2 ()Ljava/lang/String;
    public final fun component3 ()Ljava/lang/String;
    public final fun copy (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)Lspace/kscience/dataforge/context/PluginTag;
    public static synthetic fun copy$default (Lspace/kscience/dataforge/context/PluginTag;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/Object;)Lspace/kscience/dataforge/context/PluginTag;
    public fun equals (Ljava/lang/Object;)Z
    public final fun getGroup ()Ljava/lang/String;
    public final fun getName ()Ljava/lang/String;
    public final fun getVersion ()Ljava/lang/String;
    public fun hashCode ()I
    public final fun matches (Lspace/kscience/dataforge/context/PluginTag;)Z
    public fun toMeta ()Lspace/kscience/dataforge/meta/Meta;
    public fun toString ()Ljava/lang/String;
}

public final class space/kscience/dataforge/context/PluginTag$$serializer : kotlinx/serialization/internal/GeneratedSerializer {
    public static final field INSTANCE Lspace/kscience/dataforge/context/PluginTag$$serializer;
    public fun childSerializers ()[Lkotlinx/serialization/KSerializer;
    public synthetic fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Ljava/lang/Object;
    public fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Lspace/kscience/dataforge/context/PluginTag;
    public fun getDescriptor ()Lkotlinx/serialization/descriptors/SerialDescriptor;
    public synthetic fun serialize (Lkotlinx/serialization/encoding/Encoder;Ljava/lang/Object;)V
    public fun serialize (Lkotlinx/serialization/encoding/Encoder;Lspace/kscience/dataforge/context/PluginTag;)V
    public fun typeParametersSerializers ()[Lkotlinx/serialization/KSerializer;
}

public final class space/kscience/dataforge/context/PluginTag$Companion {
    public final fun fromString (Ljava/lang/String;)Lspace/kscience/dataforge/context/PluginTag;
    public final fun serializer ()Lkotlinx/serialization/KSerializer;
}

public final class space/kscience/dataforge/context/ResolveKt {
    public static final fun gather (Lspace/kscience/dataforge/context/Context;Ljava/lang/String;Lkotlin/reflect/KClass;Z)Ljava/util/Map;
    public static synthetic fun gather$default (Lspace/kscience/dataforge/context/Context;Ljava/lang/String;Lkotlin/reflect/KClass;ZILjava/lang/Object;)Ljava/util/Map;
    public static final fun getValues (Lkotlin/sequences/Sequence;)Lkotlin/sequences/Sequence;
    public static final fun resolve (Lspace/kscience/dataforge/context/Context;Ljava/lang/String;Lspace/kscience/dataforge/names/Name;Lkotlin/reflect/KClass;)Ljava/lang/Object;
}

public final class space/kscience/dataforge/context/SlfLogManager : space/kscience/dataforge/context/AbstractPlugin, space/kscience/dataforge/context/LogManager {
    public static final field Companion Lspace/kscience/dataforge/context/SlfLogManager$Companion;
    public fun <init> ()V
    public fun getDefaultLogger ()Lspace/kscience/dataforge/context/Logger;
    public fun getTag ()Lspace/kscience/dataforge/context/PluginTag;
    public fun logger (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/context/Logger;
}
public final class space/kscience/dataforge/context/SlfLogManager$Companion : space/kscience/dataforge/context/PluginFactory {
|
||||
public synthetic fun build (Lspace/kscience/dataforge/context/Context;Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
|
||||
public fun build (Lspace/kscience/dataforge/context/Context;Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/context/SlfLogManager;
|
||||
public fun getTag ()Lspace/kscience/dataforge/context/PluginTag;
|
||||
}
|
||||
|
||||
public final class space/kscience/dataforge/properties/PropertyKt {
|
||||
}
|
||||
|
||||
public final class space/kscience/dataforge/properties/SchemePropertyKt {
|
||||
}
|
||||
|
||||
public final class space/kscience/dataforge/provider/DfTypeKt {
|
||||
}
|
||||
|
||||
public final class space/kscience/dataforge/provider/Path : java/lang/Iterable, kotlin/jvm/internal/markers/KMappedMarker {
|
||||
public static final field Companion Lspace/kscience/dataforge/provider/Path$Companion;
|
||||
public static final field PATH_SEGMENT_SEPARATOR Ljava/lang/String;
|
||||
public static final synthetic fun box-impl (Ljava/util/List;)Lspace/kscience/dataforge/provider/Path;
|
||||
public static fun constructor-impl (Ljava/util/List;)Ljava/util/List;
|
||||
public fun equals (Ljava/lang/Object;)Z
|
||||
public static fun equals-impl (Ljava/util/List;Ljava/lang/Object;)Z
|
||||
public static final fun equals-impl0 (Ljava/util/List;Ljava/util/List;)Z
|
||||
public final fun getTokens ()Ljava/util/List;
|
||||
public fun hashCode ()I
|
||||
public static fun hashCode-impl (Ljava/util/List;)I
|
||||
public fun iterator ()Ljava/util/Iterator;
|
||||
public static fun iterator-impl (Ljava/util/List;)Ljava/util/Iterator;
|
||||
public fun toString ()Ljava/lang/String;
|
||||
public static fun toString-impl (Ljava/util/List;)Ljava/lang/String;
|
||||
public final synthetic fun unbox-impl ()Ljava/util/List;
|
||||
}
|
||||
|
||||
public final class space/kscience/dataforge/provider/Path$Companion {
|
||||
public final fun parse-X5wN5Vs (Ljava/lang/String;)Ljava/util/List;
|
||||
}
|
||||
|
||||
public final class space/kscience/dataforge/provider/PathKt {
|
||||
public static final fun Path ([Lkotlin/Pair;)Ljava/util/List;
|
||||
public static final fun Path ([Lspace/kscience/dataforge/names/Name;)Ljava/util/List;
|
||||
public static final fun asPath (Lspace/kscience/dataforge/names/Name;Ljava/lang/String;)Ljava/util/List;
|
||||
public static final fun asPath (Lspace/kscience/dataforge/provider/PathToken;)Ljava/util/List;
|
||||
public static synthetic fun asPath$default (Lspace/kscience/dataforge/names/Name;Ljava/lang/String;ILjava/lang/Object;)Ljava/util/List;
|
||||
public static final fun getHead-LGjlSZY (Ljava/util/List;)Lspace/kscience/dataforge/provider/PathToken;
|
||||
public static final fun getLength-LGjlSZY (Ljava/util/List;)I
|
||||
public static final fun getTail-LGjlSZY (Ljava/util/List;)Ljava/util/List;
|
||||
public static final fun plus-sn2Gq0g (Ljava/util/List;Ljava/util/List;)Ljava/util/List;
|
||||
}
|
||||
|
||||
public final class space/kscience/dataforge/provider/PathToken {
|
||||
public static final field Companion Lspace/kscience/dataforge/provider/PathToken$Companion;
|
||||
public static final field TARGET_SEPARATOR Ljava/lang/String;
|
||||
public fun <init> (Lspace/kscience/dataforge/names/Name;Ljava/lang/String;)V
|
||||
public synthetic fun <init> (Lspace/kscience/dataforge/names/Name;Ljava/lang/String;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
|
||||
public final fun component1 ()Lspace/kscience/dataforge/names/Name;
|
||||
public final fun component2 ()Ljava/lang/String;
|
||||
public final fun copy (Lspace/kscience/dataforge/names/Name;Ljava/lang/String;)Lspace/kscience/dataforge/provider/PathToken;
|
||||
public static synthetic fun copy$default (Lspace/kscience/dataforge/provider/PathToken;Lspace/kscience/dataforge/names/Name;Ljava/lang/String;ILjava/lang/Object;)Lspace/kscience/dataforge/provider/PathToken;
|
||||
public fun equals (Ljava/lang/Object;)Z
|
||||
public final fun getName ()Lspace/kscience/dataforge/names/Name;
|
||||
public final fun getTarget ()Ljava/lang/String;
|
||||
public fun hashCode ()I
|
||||
public fun toString ()Ljava/lang/String;
|
||||
}
|
||||
|
||||
public final class space/kscience/dataforge/provider/PathToken$Companion {
|
||||
public final fun parse (Ljava/lang/String;Z)Lspace/kscience/dataforge/provider/PathToken;
|
||||
public static synthetic fun parse$default (Lspace/kscience/dataforge/provider/PathToken$Companion;Ljava/lang/String;ZILjava/lang/Object;)Lspace/kscience/dataforge/provider/PathToken;
|
||||
}
|
||||
|
||||
public abstract interface class space/kscience/dataforge/provider/Provider {
|
||||
public fun content (Ljava/lang/String;)Ljava/util/Map;
|
||||
public fun getDefaultChainTarget ()Ljava/lang/String;
|
||||
public fun getDefaultTarget ()Ljava/lang/String;
|
||||
}
|
||||
|
||||
public final class space/kscience/dataforge/provider/ProviderKt {
|
||||
public static final fun provide-CSkoCSg (Lspace/kscience/dataforge/provider/Provider;Ljava/util/List;Ljava/lang/String;)Ljava/lang/Object;
|
||||
public static synthetic fun provide-CSkoCSg$default (Lspace/kscience/dataforge/provider/Provider;Ljava/util/List;Ljava/lang/String;ILjava/lang/Object;)Ljava/lang/Object;
|
||||
public static final fun top (Lspace/kscience/dataforge/provider/Provider;Ljava/lang/String;Lkotlin/reflect/KClass;)Ljava/util/Map;
|
||||
}
|
||||
|
@ -1,33 +1,24 @@
|
||||
plugins {
|
||||
id("scientifik.mpp")
|
||||
id("space.kscience.gradle.mpp")
|
||||
}
|
||||
|
||||
description = "Context and provider definitions"
|
||||
|
||||
val coroutinesVersion: String = Scientifik.coroutinesVersion
|
||||
|
||||
kotlin {
|
||||
sourceSets {
|
||||
val commonMain by getting {
|
||||
dependencies {
|
||||
api(project(":dataforge-meta"))
|
||||
api(kotlin("reflect"))
|
||||
api("io.github.microutils:kotlin-logging-common:1.7.2")
|
||||
api("org.jetbrains.kotlinx:kotlinx-coroutines-core-common:$coroutinesVersion")
|
||||
}
|
||||
}
|
||||
val jvmMain by getting {
|
||||
dependencies {
|
||||
api("io.github.microutils:kotlin-logging:1.7.2")
|
||||
api("ch.qos.logback:logback-classic:1.2.3")
|
||||
api("org.jetbrains.kotlinx:kotlinx-coroutines-core:$coroutinesVersion")
|
||||
}
|
||||
}
|
||||
val jsMain by getting {
|
||||
dependencies {
|
||||
api("io.github.microutils:kotlin-logging-js:1.7.2")
|
||||
api("org.jetbrains.kotlinx:kotlinx-coroutines-core-js:$coroutinesVersion")
|
||||
}
|
||||
}
|
||||
kscience {
|
||||
jvm()
|
||||
js()
|
||||
native()
|
||||
useCoroutines()
|
||||
useSerialization()
|
||||
dependencies {
|
||||
api(project(":dataforge-meta"))
|
||||
}
|
||||
dependencies(jvmMain){
|
||||
api(kotlin("reflect"))
|
||||
api("org.slf4j:slf4j-api:1.7.30")
|
||||
}
|
||||
}
|
||||
|
||||
readme {
|
||||
maturity = space.kscience.gradle.Maturity.DEVELOPMENT
|
||||
}
|
@ -1,44 +0,0 @@
|
||||
package hep.dataforge.context
|
||||
|
||||
import hep.dataforge.meta.EmptyMeta
|
||||
import hep.dataforge.meta.Meta
|
||||
import hep.dataforge.names.Name
|
||||
import kotlin.properties.ReadOnlyProperty
|
||||
import kotlin.reflect.KClass
|
||||
import kotlin.reflect.KProperty
|
||||
|
||||
abstract class AbstractPlugin(override val meta: Meta = EmptyMeta) : Plugin {
|
||||
private var _context: Context? = null
|
||||
private val dependencies = ArrayList<PluginFactory<*>>()
|
||||
|
||||
override val context: Context
|
||||
get() = _context ?: error("Plugin $tag is not attached")
|
||||
|
||||
override fun attach(context: Context) {
|
||||
this._context = context
|
||||
}
|
||||
|
||||
override fun detach() {
|
||||
this._context = null
|
||||
}
|
||||
|
||||
final override fun dependsOn(): List<PluginFactory<*>> = dependencies
|
||||
|
||||
/**
|
||||
* Register a plugin dependency and return a delegate which provides a lazily initialized reference to the dependent plugin
|
||||
*/
|
||||
protected fun <P : Plugin> require(factory: PluginFactory<P>): ReadOnlyProperty<AbstractPlugin, P> {
|
||||
dependencies.add(factory)
|
||||
return PluginDependencyDelegate(factory.type)
|
||||
}
|
||||
|
||||
override fun provideTop(target: String): Map<Name, Any> = emptyMap()
|
||||
}
|
||||
|
||||
fun <T : Named> Collection<T>.toMap(): Map<Name, T> = associate { it.name to it }
|
||||
|
||||
private class PluginDependencyDelegate<P : Plugin>(val type: KClass<out P>) : ReadOnlyProperty<AbstractPlugin, P> {
|
||||
override fun getValue(thisRef: AbstractPlugin, property: KProperty<*>): P {
|
||||
return thisRef.context.plugins[type] ?: error("Plugin with type $type not found")
|
||||
}
|
||||
}
|
@ -1,181 +0,0 @@
|
||||
package hep.dataforge.context
|
||||
|
||||
import hep.dataforge.meta.*
|
||||
import hep.dataforge.names.Name
|
||||
import hep.dataforge.names.asName
|
||||
import hep.dataforge.names.plus
|
||||
import hep.dataforge.provider.Provider
|
||||
import hep.dataforge.provider.top
|
||||
import hep.dataforge.values.Value
|
||||
import kotlinx.coroutines.CoroutineScope
|
||||
import mu.KLogger
|
||||
import mu.KotlinLogging
|
||||
import kotlin.coroutines.CoroutineContext
|
||||
import kotlin.coroutines.EmptyCoroutineContext
|
||||
import kotlin.jvm.JvmName
|
||||
|
||||
/**
|
||||
* The local environment for anything being done in DataForge framework. Contexts are organized into tree structure with [Global] at the top.
|
||||
* Context has [properties] - equivalent for system environment values, but grouped into a tree and inherited from parent context.
|
||||
*
|
||||
* The main function of the Context is to provide [PluginManager] which stores the loaded plugins and works as a dependency injection point.
|
||||
* The normal behaviour of the [PluginManager] is to search for a plugin in the parent context if it is not found in the current one. It is possible to have
* different plugins with the same interface in different contexts in the hierarchy. The usual behaviour is to use the nearest one, but this can
* be overridden by the plugin implementation.
|
||||
*
|
||||
* Since plugins could contain mutable state, a context has two states: active and inactive. No changes are allowed to an active context.
|
||||
* @author Alexander Nozik
|
||||
*/
|
||||
open class Context(
|
||||
final override val name: Name,
|
||||
val parent: Context? = Global
|
||||
) : Named, MetaRepr, Provider, CoroutineScope {
|
||||
|
||||
private val config = Config()
|
||||
|
||||
/**
|
||||
* Context properties. Works as a substitute for environment variables
|
||||
*/
|
||||
val properties: Meta = if (parent == null) {
|
||||
config
|
||||
} else {
|
||||
Laminate(config, parent.properties)
|
||||
}
|
||||
|
||||
/**
|
||||
* Context logger
|
||||
*/
|
||||
val logger: KLogger = KotlinLogging.logger(name.toString())
|
||||
|
||||
/**
|
||||
* A [PluginManager] for current context
|
||||
*/
|
||||
val plugins: PluginManager by lazy { PluginManager(this) }
|
||||
|
||||
private val activators = HashSet<Any>()
|
||||
|
||||
/**
|
||||
* Defines if the context is used in any kind of active computation. Active context properties and plugins cannot be changed
|
||||
*/
|
||||
val isActive: Boolean = activators.isNotEmpty()
|
||||
|
||||
override val defaultTarget: String get() = Plugin.PLUGIN_TARGET
|
||||
|
||||
override fun provideTop(target: String): Map<Name, Any> {
|
||||
return when (target) {
|
||||
Value.TYPE -> properties.sequence().toMap()
|
||||
Plugin.PLUGIN_TARGET -> plugins.sequence(true).associateBy { it.name }
|
||||
else -> emptyMap()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark context as active and used by [activator]
|
||||
*/
|
||||
fun activate(activator: Any) {
|
||||
activators.add(activator)
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark context unused by [activator]
|
||||
*/
|
||||
fun deactivate(activator: Any) {
|
||||
activators.remove(activator)
|
||||
}
|
||||
|
||||
/**
|
||||
* Change the properties of the context. If active, throw an exception
|
||||
*/
|
||||
fun configure(action: Config.() -> Unit) {
|
||||
if (isActive) error("Can't configure active context")
|
||||
config.action()
|
||||
}
|
||||
|
||||
override val coroutineContext: CoroutineContext
|
||||
get() = EmptyCoroutineContext
|
||||
|
||||
/**
|
||||
* Detach all plugins and terminate context
|
||||
*/
|
||||
open fun close() {
|
||||
if (isActive) error("Can't close active context")
|
||||
//detach all plugins
|
||||
plugins.forEach { it.detach() }
|
||||
}
|
||||
|
||||
override fun toMeta(): Meta = buildMeta {
|
||||
"parent" to parent?.name
|
||||
"properties" put properties.seal()
|
||||
"plugins" put plugins.map { it.toMeta() }
|
||||
}
|
||||
}
|
||||
|
||||
fun Context.content(target: String): Map<Name, Any> = content<Any>(target)
|
||||
|
||||
/**
|
||||
* A map of all objects provided by plugins with given target and type
|
||||
*/
|
||||
@JvmName("typedContent")
|
||||
inline fun <reified T : Any> Context.content(target: String): Map<Name, T> =
|
||||
plugins.flatMap { plugin ->
|
||||
plugin.top<T>(target).entries.map { (plugin.name + it.key) to it.value }
|
||||
}.associate { it }
|
||||
|
||||
|
||||
/**
|
||||
* A global root context. Closing [Global] terminates the framework.
|
||||
*/
|
||||
object Global : Context("GLOBAL".asName(), null) {
|
||||
/**
|
||||
* Closing all contexts
|
||||
*
|
||||
* @throws Exception
|
||||
*/
|
||||
override fun close() {
|
||||
logger.info { "Shutting down GLOBAL" }
|
||||
for (ctx in contextRegistry.values) {
|
||||
ctx.close()
|
||||
}
|
||||
super.close()
|
||||
}
|
||||
|
||||
private val contextRegistry = HashMap<String, Context>()
|
||||
|
||||
/**
|
||||
* Get previously built context
|
||||
*
|
||||
* @param name
|
||||
* @return
|
||||
*/
|
||||
fun getContext(name: String): Context? {
|
||||
return contextRegistry[name]
|
||||
}
|
||||
|
||||
fun context(name: String, parent: Context = this, block: ContextBuilder.() -> Unit = {}): Context =
|
||||
ContextBuilder(name, parent).apply(block).build()
|
||||
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* The interface for something that is encapsulated in a context
|
||||
*
|
||||
* @author Alexander Nozik
|
||||
* @version $Id: $Id
|
||||
*/
|
||||
interface ContextAware {
|
||||
/**
|
||||
* Get context for this object
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
val context: Context
|
||||
|
||||
val logger: KLogger
|
||||
get() = if (this is Named) {
|
||||
KotlinLogging.logger((context.name + this.name).toString())
|
||||
} else {
|
||||
context.logger
|
||||
}
|
||||
|
||||
}
|
@ -1,43 +0,0 @@
|
||||
package hep.dataforge.context
|
||||
|
||||
import hep.dataforge.meta.DFBuilder
|
||||
import hep.dataforge.meta.MetaBuilder
|
||||
import hep.dataforge.meta.buildMeta
|
||||
import hep.dataforge.names.toName
|
||||
|
||||
/**
|
||||
* A convenience builder for context
|
||||
*/
|
||||
@DFBuilder
|
||||
class ContextBuilder(var name: String = "@anonymous", val parent: Context = Global) {
|
||||
private val plugins = ArrayList<Plugin>()
|
||||
private var meta = MetaBuilder()
|
||||
|
||||
fun properties(action: MetaBuilder.() -> Unit) {
|
||||
meta.action()
|
||||
}
|
||||
|
||||
fun plugin(plugin: Plugin) {
|
||||
plugins.add(plugin)
|
||||
}
|
||||
|
||||
fun plugin(tag: PluginTag, action: MetaBuilder.() -> Unit = {}) {
|
||||
plugins.add(PluginRepository.fetch(tag, buildMeta(action)))
|
||||
}
|
||||
|
||||
fun plugin(builder: PluginFactory<*>, action: MetaBuilder.() -> Unit = {}) {
|
||||
plugins.add(builder.invoke(buildMeta(action)))
|
||||
}
|
||||
|
||||
fun plugin(name: String, group: String = "", version: String = "", action: MetaBuilder.() -> Unit = {}) {
|
||||
plugin(PluginTag(name, group, version), action)
|
||||
}
|
||||
|
||||
fun build(): Context {
|
||||
return Context(name.toName(), parent).apply {
|
||||
this@ContextBuilder.plugins.forEach {
|
||||
plugins.load(it)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@ -1,8 +0,0 @@
|
||||
package hep.dataforge.context
|
||||
|
||||
import hep.dataforge.meta.EmptyMeta
|
||||
import hep.dataforge.meta.Meta
|
||||
|
||||
interface Factory<out T : Any> {
|
||||
operator fun invoke(meta: Meta = EmptyMeta, context: Context = Global): T
|
||||
}
|
@ -1,141 +0,0 @@
|
||||
package hep.dataforge.context
|
||||
|
||||
import hep.dataforge.meta.EmptyMeta
|
||||
import hep.dataforge.meta.Meta
|
||||
import hep.dataforge.meta.MetaBuilder
|
||||
import hep.dataforge.meta.buildMeta
|
||||
import kotlin.reflect.KClass
|
||||
|
||||
/**
|
||||
* The manager for the plugin system. It should monitor plugin dependencies and locks.
|
||||
*
|
||||
* @property context A context for this plugin manager
|
||||
* @author Alexander Nozik
|
||||
*/
|
||||
class PluginManager(override val context: Context) : ContextAware, Iterable<Plugin> {
|
||||
|
||||
/**
|
||||
* A set of loaded plugins
|
||||
*/
|
||||
private val plugins = HashSet<Plugin>()
|
||||
|
||||
/**
|
||||
* A [PluginManager] of parent context if it is present
|
||||
*/
|
||||
private val parent: PluginManager? = context.parent?.plugins
|
||||
|
||||
|
||||
fun sequence(recursive: Boolean): Sequence<Plugin> {
|
||||
return if (recursive && parent != null) {
|
||||
plugins.asSequence() + parent.sequence(true)
|
||||
} else {
|
||||
plugins.asSequence()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get an existing plugin or return null if it is not present. Only the first matching plugin is returned.
|
||||
* @param recursive search for parent [PluginManager] plugins
|
||||
* @param predicate condition for the plugin
|
||||
*/
|
||||
fun find(recursive: Boolean = true, predicate: (Plugin) -> Boolean): Plugin? = sequence(recursive).find(predicate)
|
||||
|
||||
|
||||
/**
|
||||
* Find a loaded plugin via its tag
|
||||
*
|
||||
* @param tag
|
||||
* @return
|
||||
*/
|
||||
operator fun get(tag: PluginTag, recursive: Boolean = true): Plugin? = find(recursive) { tag.matches(it.tag) }
|
||||
|
||||
|
||||
/**
|
||||
* Find a loaded plugin via its class. This method does not check if the result is unique and just returns the first
* plugin matching the class condition.
* For a safe search, provide a tag, since tags are checked on load and plugins with the same tag are not allowed
* in the same context.
|
||||
*
|
||||
* @param tag
|
||||
* @param type
|
||||
* @param <T>
|
||||
* @return
|
||||
*/
|
||||
@Suppress("UNCHECKED_CAST")
|
||||
operator fun <T : Any> get(type: KClass<T>, tag: PluginTag? = null, recursive: Boolean = true): T? =
|
||||
find(recursive) { type.isInstance(it) && (tag == null || tag.matches(it.tag)) } as T?
|
||||
|
||||
inline operator fun <reified T : Any> get(tag: PluginTag? = null, recursive: Boolean = true): T? =
|
||||
get(T::class, tag, recursive)
|
||||
|
||||
inline operator fun <reified T : Plugin> get(factory: PluginFactory<T>, recursive: Boolean = true): T? =
|
||||
get(factory.type, factory.tag, recursive)
|
||||
|
||||
/**
|
||||
* Load the given plugin into this manager and return the loaded instance.
* Throw an error if a plugin of the same type and tag already exists in the manager.
|
||||
*
|
||||
* @param plugin
|
||||
* @return
|
||||
*/
|
||||
fun <T : Plugin> load(plugin: T): T {
|
||||
if (context.isActive) error("Can't load plugin into active context")
|
||||
|
||||
if (get(plugin::class, plugin.tag, recursive = false) != null) {
|
||||
error("Plugin of type ${plugin::class} already exists in ${context.name}")
|
||||
} else {
|
||||
for (tag in plugin.dependsOn()) {
|
||||
fetch(tag, true)
|
||||
}
|
||||
|
||||
logger.info { "Loading plugin ${plugin.name} into ${context.name}" }
|
||||
plugin.attach(context)
|
||||
plugins.add(plugin)
|
||||
return plugin
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Load a plugin using its factory
|
||||
*/
|
||||
fun <T : Plugin> load(factory: PluginFactory<T>, meta: Meta = EmptyMeta): T =
|
||||
load(factory(meta, context))
|
||||
|
||||
fun <T : Plugin> load(factory: PluginFactory<T>, metaBuilder: MetaBuilder.() -> Unit): T =
|
||||
load(factory, buildMeta(metaBuilder))
|
||||
|
||||
/**
|
||||
* Remove a plugin from [PluginManager]
|
||||
*/
|
||||
fun remove(plugin: Plugin) {
|
||||
if (context.isActive) error("Can't remove plugin from active context")
|
||||
|
||||
if (plugins.contains(plugin)) {
|
||||
logger.info { "Removing plugin ${plugin.name} from ${context.name}" }
|
||||
plugin.detach()
|
||||
plugins.remove(plugin)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get an existing plugin with given meta or load new one using provided factory
|
||||
*
|
||||
*/
|
||||
fun <T : Plugin> fetch(factory: PluginFactory<T>, recursive: Boolean = true, meta: Meta = EmptyMeta): T {
|
||||
val loaded = get(factory.type, factory.tag, recursive)
|
||||
return when {
|
||||
loaded == null -> load(factory(meta, context))
|
||||
loaded.meta == meta -> loaded // if meta is the same, return existing plugin
|
||||
else -> throw RuntimeException("Can't load plugin with tag ${factory.tag}. Plugin with this tag and different configuration already exists in context.")
|
||||
}
|
||||
}
|
||||
|
||||
fun <T : Plugin> fetch(
|
||||
factory: PluginFactory<T>,
|
||||
recursive: Boolean = true,
|
||||
metaBuilder: MetaBuilder.() -> Unit
|
||||
): T = fetch(factory, recursive, buildMeta(metaBuilder))
|
||||
|
||||
override fun iterator(): Iterator<Plugin> = plugins.iterator()
|
||||
|
||||
}
|
@ -1,49 +0,0 @@
|
||||
package hep.dataforge.context
|
||||
|
||||
import hep.dataforge.meta.EmptyMeta
|
||||
import hep.dataforge.meta.Meta
|
||||
import kotlin.reflect.KClass
|
||||
|
||||
interface PluginFactory<T : Plugin> : Factory<T> {
|
||||
val tag: PluginTag
|
||||
val type: KClass<out T>
|
||||
}
|
||||
|
||||
expect object PluginRepository {
|
||||
|
||||
fun register(factory: PluginFactory<*>)
|
||||
|
||||
/**
|
||||
* List plugins available in the repository
|
||||
*/
|
||||
fun list(): Sequence<PluginFactory<*>>
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch specific plugin and instantiate it with given meta
|
||||
*/
|
||||
fun PluginRepository.fetch(tag: PluginTag, meta: Meta = EmptyMeta): Plugin =
|
||||
list().find { it.tag.matches(tag) }?.invoke(meta = meta)
|
||||
?: error("Plugin with tag $tag not found in the repository")
|
||||
|
||||
fun <T : Plugin> PluginRepository.register(
|
||||
tag: PluginTag,
|
||||
type: KClass<out T>,
|
||||
constructor: (Context, Meta) -> T
|
||||
): PluginFactory<T> {
|
||||
val factory = object : PluginFactory<T> {
|
||||
override val tag: PluginTag = tag
|
||||
override val type: KClass<out T> = type
|
||||
|
||||
override fun invoke(meta: Meta, context: Context): T = constructor(context, meta)
|
||||
|
||||
}
|
||||
register(factory)
|
||||
return factory
|
||||
}
|
||||
|
||||
inline fun <reified T : Plugin> PluginRepository.register(tag: PluginTag, noinline constructor: (Context, Meta) -> T) =
|
||||
register(tag, T::class, constructor)
|
||||
|
||||
fun PluginRepository.register(plugin: Plugin) = register(plugin.tag, plugin::class) { _, _ -> plugin }
|
@ -1,75 +0,0 @@
|
||||
/*
|
||||
* Copyright 2015 Alexander Nozik.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package hep.dataforge.provider
|
||||
|
||||
import hep.dataforge.names.Name
|
||||
import hep.dataforge.names.toName
|
||||
|
||||
/**
|
||||
*
|
||||
*
|
||||
* Path interface.
|
||||
*
|
||||
* @author Alexander Nozik
|
||||
* @version $Id: $Id
|
||||
*/
|
||||
inline class Path(val tokens: List<PathToken>) : Iterable<PathToken> {
|
||||
|
||||
val head: PathToken? get() = tokens.firstOrNull()
|
||||
|
||||
val length: Int get() = tokens.size
|
||||
|
||||
/**
|
||||
* Returns the path without its first segment, or null if the path is empty.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
val tail: Path? get() = if (tokens.isEmpty()) null else Path(tokens.drop(1))
|
||||
|
||||
override fun iterator(): Iterator<PathToken> = tokens.iterator()
|
||||
|
||||
companion object {
|
||||
const val PATH_SEGMENT_SEPARATOR = "/"
|
||||
|
||||
fun parse(path: String): Path {
|
||||
val head = path.substringBefore(PATH_SEGMENT_SEPARATOR)
|
||||
val tail = path.substringAfter(PATH_SEGMENT_SEPARATOR)
|
||||
return PathToken.parse(head).toPath() + parse(tail)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
operator fun Path.plus(path: Path) = Path(this.tokens + path.tokens)
|
||||
|
||||
data class PathToken(val name: Name, val target: String? = null) {
|
||||
override fun toString(): String = if (target == null) {
|
||||
name.toString()
|
||||
} else {
|
||||
"$target$TARGET_SEPARATOR$name"
|
||||
}
|
||||
|
||||
companion object {
|
||||
const val TARGET_SEPARATOR = "::"
|
||||
fun parse(token: String): PathToken {
|
||||
val target = token.substringBefore(TARGET_SEPARATOR, "")
|
||||
val name = token.substringAfter(TARGET_SEPARATOR).toName()
|
||||
if (target.contains("[")) TODO("target separators in queries are not supported")
|
||||
return PathToken(name, target)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fun PathToken.toPath() = Path(listOf(this))
|
@ -0,0 +1,57 @@
|
||||
package space.kscience.dataforge.context
|
||||
|
||||
import space.kscience.dataforge.meta.Meta
|
||||
import space.kscience.dataforge.misc.DFInternal
|
||||
import space.kscience.dataforge.misc.Named
|
||||
import space.kscience.dataforge.names.Name
|
||||
import kotlin.properties.ReadOnlyProperty
|
||||
import kotlin.reflect.KClass
|
||||
import kotlin.reflect.KProperty
|
||||
|
||||
public abstract class AbstractPlugin(override val meta: Meta = Meta.EMPTY) : Plugin {
|
||||
private var _context: Context? = null
|
||||
private val dependencies = HashMap<PluginFactory<*>, Meta>()
|
||||
|
||||
override val isAttached: Boolean get() = _context != null
|
||||
|
||||
override val context: Context
|
||||
get() = _context ?: error("Plugin $tag is not attached")
|
||||
|
||||
override fun attach(context: Context) {
|
||||
this._context = context
|
||||
}
|
||||
|
||||
override fun detach() {
|
||||
this._context = null
|
||||
}
|
||||
|
||||
override fun dependsOn(): Map<PluginFactory<*>, Meta> = dependencies
|
||||
|
||||
protected fun <P : Plugin> require(
|
||||
factory: PluginFactory<P>,
|
||||
type: KClass<P>,
|
||||
meta: Meta = Meta.EMPTY,
|
||||
): ReadOnlyProperty<AbstractPlugin, P> {
|
||||
dependencies[factory] = meta
|
||||
return PluginDependencyDelegate(factory, type)
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a plugin dependency and return a delegate which provides a lazily initialized reference to the dependent plugin
|
||||
*/
|
||||
protected inline fun <reified P : Plugin> require(
|
||||
factory: PluginFactory<P>,
|
||||
meta: Meta = Meta.EMPTY,
|
||||
): ReadOnlyProperty<AbstractPlugin, P> = require(factory, P::class, meta)
|
||||
}
|
||||
|
||||
public fun <T : Named> Collection<T>.associateByName(): Map<Name, T> = associate { it.name to it }
|
||||
|
||||
private class PluginDependencyDelegate<P : Plugin>(val factory: PluginFactory<P>, val type: KClass<P>) :
|
||||
ReadOnlyProperty<AbstractPlugin, P> {
|
||||
@OptIn(DFInternal::class)
|
||||
override fun getValue(thisRef: AbstractPlugin, property: KProperty<*>): P {
|
||||
if (!thisRef.isAttached) error("Plugin dependency must not be called eagerly during initialization.")
|
||||
return thisRef.context.plugins.getByType(type, factory.tag) ?: error("Plugin ${factory.tag} not found")
|
||||
}
|
||||
}
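A minimal usage sketch (not part of the changeset): a plugin built on AbstractPlugin that declares its dependency through the require delegate. DemoPlugin is a hypothetical name used only for illustration; DefaultLogManager is the log manager factory introduced elsewhere in this changeset.
// Hypothetical illustration only; DemoPlugin does not exist in the repository.
class DemoPlugin : AbstractPlugin() {
    override val tag: PluginTag get() = Companion.tag

    // Registers DefaultLogManager in dependsOn() and resolves it lazily after attach()
    private val logManager: DefaultLogManager by require(DefaultLogManager)

    companion object : PluginFactory<DemoPlugin> {
        override val tag: PluginTag = PluginTag("demo", group = PluginTag.DATAFORGE_GROUP)
        override fun build(context: Context, meta: Meta): DemoPlugin = DemoPlugin()
    }
}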
|
@ -0,0 +1,119 @@
|
||||
package space.kscience.dataforge.context
|
||||
|
||||
import kotlinx.coroutines.CoroutineScope
|
||||
import kotlinx.coroutines.Job
|
||||
import kotlinx.coroutines.SupervisorJob
|
||||
import space.kscience.dataforge.meta.*
|
||||
import space.kscience.dataforge.misc.DFExperimental
|
||||
import space.kscience.dataforge.misc.Named
|
||||
import space.kscience.dataforge.misc.ThreadSafe
|
||||
import space.kscience.dataforge.names.Name
|
||||
import space.kscience.dataforge.provider.Provider
|
||||
import kotlin.coroutines.CoroutineContext
|
||||
|
||||
/**
|
||||
* The local environment for anything being done in DataForge framework. Contexts are organized into tree structure with [Global] at the top.
|
||||
* Context has [properties] - equivalent for system environment values, but grouped into a tree and inherited from parent context.
|
||||
*
|
||||
* The main function of the Context is to provide [PluginManager] which stores the loaded plugins and works as a dependency injection point.
|
||||
* The normal behaviour of the [PluginManager] is to search for a plugin in the parent context if it is not found in the current one. It is possible to have
* different plugins with the same interface in different contexts in the hierarchy. The usual behaviour is to use the nearest one, but this can
* be overridden by the plugin implementation.
|
||||
*
|
||||
*/
|
||||
public open class Context internal constructor(
|
||||
final override val name: Name,
|
||||
public val parent: Context?,
|
||||
plugins: Set<Plugin>, // set of unattached plugins
|
||||
meta: Meta,
|
||||
) : Named, MetaRepr, Provider, CoroutineScope {
|
||||
|
||||
/**
|
||||
* Context properties. Works as a substitute for environment variables
|
||||
*/
|
||||
public val properties: Laminate = if (parent == null) {
|
||||
Laminate(meta)
|
||||
} else {
|
||||
Laminate(meta, parent.properties)
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* A [PluginManager] for current context
|
||||
*/
|
||||
public val plugins: PluginManager by lazy { PluginManager(this, plugins) }
|
||||
|
||||
override val defaultTarget: String get() = Plugin.TARGET
|
||||
|
||||
public fun content(target: String, inherit: Boolean): Map<Name, Any> {
|
||||
return if (inherit) {
|
||||
when (target) {
|
||||
PROPERTY_TARGET -> properties.nodeSequence().toMap()
|
||||
Plugin.TARGET -> plugins.list(true).associateBy { it.name }
|
||||
else -> emptyMap()
|
||||
}
|
||||
} else {
|
||||
when (target) {
|
||||
PROPERTY_TARGET -> properties.layers.firstOrNull()?.nodeSequence()?.toMap() ?: emptyMap()
|
||||
Plugin.TARGET -> plugins.list(false).associateBy { it.name }
|
||||
else -> emptyMap()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
override fun content(target: String): Map<Name, Any> = content(target, true)
|
||||
|
||||
override val coroutineContext: CoroutineContext by lazy {
|
||||
(parent ?: Global).coroutineContext.let { parentContext ->
parentContext + SupervisorJob(parentContext[Job])
|
||||
}
|
||||
}
|
||||
|
||||
private val childrenContexts = HashMap<Name, Context>()
|
||||
|
||||
/**
|
||||
* Get and validate an existing child context or build and register a new one.
|
||||
* @param name the relative (tail) name of the new context. If null, uses context hash code as a marker.
|
||||
*/
|
||||
@OptIn(DFExperimental::class)
|
||||
@ThreadSafe
|
||||
public fun buildContext(name: Name? = null, block: ContextBuilder.() -> Unit = {}): Context {
|
||||
val existing = name?.let { childrenContexts[name] }
|
||||
return existing?.modify(block) ?: ContextBuilder(this, name).apply(block).build().also {
|
||||
childrenContexts[it.name] = it
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Detach all plugins, and close child contexts
|
||||
*/
|
||||
public open fun close() {
|
||||
//recursively close child contexts
|
||||
childrenContexts.forEach { it.value.close() }
|
||||
//detach all plugins
|
||||
plugins.forEach { it.detach() }
|
||||
}
|
||||
|
||||
override fun toMeta(): Meta = Meta {
|
||||
"parent" to parent?.name
|
||||
properties.layers.firstOrNull()?.let { set("properties", it) }
|
||||
"plugins" putIndexed plugins.map { it.toMeta() }
|
||||
}
|
||||
|
||||
public companion object {
|
||||
public const val PROPERTY_TARGET: String = "context.property"
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The interface for something that is encapsulated in a context
|
||||
*
|
||||
*/
|
||||
public interface ContextAware {
|
||||
/**
|
||||
* Get context for this object
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public val context: Context
|
||||
}
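A usage sketch of the API above. The property key and values are illustrative assumptions; the top-level Context(...) helper is defined later in this changeset.
// Illustrative only; the property key "data.path" is an assumption.
val root = Context("research") {
    properties {
        "data.path" put "/tmp/data"
    }
}
// The child layers its own meta over the parent properties via Laminate and loads a plugin
val child = root.buildContext("fit".asName()) {
    plugin(DefaultLogManager)
}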
|
@ -0,0 +1,111 @@
|
||||
package space.kscience.dataforge.context
|
||||
|
||||
import space.kscience.dataforge.meta.Meta
|
||||
import space.kscience.dataforge.meta.MutableMeta
|
||||
import space.kscience.dataforge.meta.seal
|
||||
import space.kscience.dataforge.meta.toMutableMeta
|
||||
import space.kscience.dataforge.misc.DFBuilder
|
||||
import space.kscience.dataforge.misc.DFExperimental
|
||||
import space.kscience.dataforge.names.Name
|
||||
import space.kscience.dataforge.names.NameToken
|
||||
import space.kscience.dataforge.names.asName
|
||||
import space.kscience.dataforge.names.plus
|
||||
import kotlin.collections.component1
|
||||
import kotlin.collections.component2
|
||||
import kotlin.collections.set
|
||||
|
||||
/**
|
||||
* A convenience builder for context
|
||||
*/
|
||||
@DFBuilder
|
||||
public class ContextBuilder internal constructor(
|
||||
private val parent: Context,
|
||||
public val name: Name? = null,
|
||||
meta: Meta = Meta.EMPTY,
|
||||
) {
|
||||
internal val factories = HashMap<PluginFactory<*>, Meta>()
|
||||
internal var meta = meta.toMutableMeta()
|
||||
|
||||
public fun properties(action: MutableMeta.() -> Unit) {
|
||||
meta.action()
|
||||
}
|
||||
|
||||
@OptIn(DFExperimental::class)
|
||||
private fun findPluginFactory(tag: PluginTag): PluginFactory<*> =
|
||||
parent.gatherInSequence<PluginFactory<*>>(PluginFactory.TYPE).values
|
||||
.find { it.tag.matches(tag) } ?: error("Can't resolve plugin factory for $tag")
|
||||
|
||||
public fun plugin(tag: PluginTag, mutableMeta: MutableMeta.() -> Unit = {}) {
|
||||
val factory = findPluginFactory(tag)
|
||||
factories[factory] = Meta(mutableMeta)
|
||||
}
|
||||
|
||||
public fun plugin(factory: PluginFactory<*>, meta: Meta) {
|
||||
factories[factory] = meta
|
||||
}
|
||||
|
||||
public fun plugin(factory: PluginFactory<*>, mutableMeta: MutableMeta.() -> Unit = {}) {
|
||||
factories[factory] = Meta(mutableMeta)
|
||||
}
|
||||
|
||||
public fun plugin(name: String, group: String = "", version: String = "", action: MutableMeta.() -> Unit = {}) {
|
||||
plugin(PluginTag(name, group, version), action)
|
||||
}
|
||||
|
||||
/**
|
||||
* Add an already existing (de-facto) plugin instance as a dependency
|
||||
*/
|
||||
public fun plugin(plugin: Plugin) {
|
||||
plugin(DeFactoPluginFactory(plugin))
|
||||
}
|
||||
|
||||
public fun build(): Context {
|
||||
val contextName = name ?: NameToken("@auto",hashCode().toUInt().toString(16)).asName()
|
||||
val plugins = HashMap<PluginTag, Plugin>()
|
||||
|
||||
fun addPlugin(factory: PluginFactory<*>, meta: Meta) {
|
||||
val existing = plugins[factory.tag]
|
||||
// Add if does not exist
|
||||
if (existing == null) {
|
||||
//TODO bypass if parent already has plugin with given meta?
|
||||
val plugin = factory.build(parent, meta)
|
||||
|
||||
for ((depFactory, depMeta) in plugin.dependsOn()) {
addPlugin(depFactory, depMeta)
|
||||
}
|
||||
|
||||
parent.logger.info { "Loading plugin ${plugin.name} into $contextName" }
|
||||
plugins[plugin.tag] = plugin
|
||||
} else if (existing.meta != meta) {
|
||||
error("Plugin with tag ${factory.tag} and meta $meta already exists in $contextName")
|
||||
}
|
||||
//bypass if exists with the same meta
|
||||
}
|
||||
|
||||
factories.forEach { (factory, meta) ->
|
||||
addPlugin(factory, meta)
|
||||
}
|
||||
|
||||
return Context(contextName, parent, plugins.values.toSet(), meta.seal())
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the current context contains all plugins required by the builder; return this context if it does, or fork a new context
* if it does not.
|
||||
*/
|
||||
@DFExperimental
|
||||
public fun Context.modify(block: ContextBuilder.() -> Unit): Context {
|
||||
|
||||
fun Context.contains(factory: PluginFactory<*>, meta: Meta): Boolean {
|
||||
val loaded = plugins[factory.tag] ?: return false
|
||||
return loaded.meta == meta
|
||||
}
|
||||
|
||||
val builder = ContextBuilder(this, name + "mod", properties).apply(block)
|
||||
val requiresFork = builder.factories.any { (factory, meta) ->
|
||||
!contains(factory, meta)
|
||||
} || ((properties as Meta) == builder.meta)
|
||||
|
||||
return if (requiresFork) builder.build() else this
|
||||
}
|
@ -0,0 +1,12 @@
|
||||
package space.kscience.dataforge.context
|
||||
|
||||
import space.kscience.dataforge.meta.Meta
|
||||
|
||||
public fun interface Factory<out T> {
|
||||
public fun build(context: Context, meta: Meta): T
|
||||
}
|
||||
|
||||
public operator fun <T> Factory<T>.invoke(
|
||||
meta: Meta = Meta.EMPTY,
|
||||
context: Context = Global,
|
||||
): T = build(context, meta)
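Because Factory is a fun interface, a factory can be expressed as a lambda and invoked through the operator above. A trivial sketch; the value is illustrative:
val answerFactory: Factory<Int> = Factory { _, _ -> 42 }
val answer: Int = answerFactory() // defaults: meta = Meta.EMPTY, context = Global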
|
@ -0,0 +1,24 @@
|
||||
package space.kscience.dataforge.context
|
||||
|
||||
import kotlinx.coroutines.CoroutineName
|
||||
import kotlinx.coroutines.Job
|
||||
import space.kscience.dataforge.meta.Meta
|
||||
import space.kscience.dataforge.names.asName
|
||||
import space.kscience.dataforge.names.parseAsName
|
||||
import kotlin.coroutines.CoroutineContext
|
||||
import kotlin.native.concurrent.ThreadLocal
|
||||
|
||||
internal expect fun getGlobalLoggerFactory(): PluginFactory<out LogManager>
|
||||
|
||||
/**
|
||||
* A global root context. Closing [Global] terminates the framework.
|
||||
*/
|
||||
@ThreadLocal
|
||||
private object GlobalContext : Context("GLOBAL".asName(), null, emptySet(), Meta.EMPTY) {
|
||||
override val coroutineContext: CoroutineContext = Job() + CoroutineName("GlobalContext")
|
||||
}
|
||||
|
||||
public val Global: Context get() = GlobalContext
|
||||
|
||||
public fun Context(name: String? = null, block: ContextBuilder.() -> Unit = {}): Context =
|
||||
Global.buildContext(name?.parseAsName(), block)
|
@ -0,0 +1,89 @@
|
||||
package space.kscience.dataforge.context
|
||||
|
||||
import space.kscience.dataforge.meta.Meta
|
||||
import space.kscience.dataforge.misc.Named
|
||||
import space.kscience.dataforge.names.Name
|
||||
import space.kscience.dataforge.names.plus
|
||||
|
||||
public fun interface Logger {
|
||||
public fun log(tag: String, body: () -> String)
|
||||
}
|
||||
|
||||
public interface LogManager : Plugin, Logger {
|
||||
public fun logger(name: Name): Logger
|
||||
|
||||
public val defaultLogger: Logger
|
||||
|
||||
override fun log(tag: String, body: () -> String): Unit = defaultLogger.log(tag, body)
|
||||
|
||||
public fun log(name: Name, tag: String, body: () -> String): Unit = logger(name).log(tag, body)
|
||||
|
||||
public companion object {
|
||||
public const val TRACE: String = "TRACE"
|
||||
public const val INFO: String = "INFO"
|
||||
public const val DEBUG: String = "DEBUG"
|
||||
public const val WARNING: String = "WARNING"
|
||||
public const val ERROR: String = "ERROR"
|
||||
}
|
||||
}
|
||||
|
||||
public fun Logger.trace(body: () -> String): Unit = log(LogManager.TRACE, body)
|
||||
public fun Logger.info(body: () -> String): Unit = log(LogManager.INFO, body)
|
||||
public fun Logger.debug(body: () -> String): Unit = log(LogManager.DEBUG, body)
|
||||
public fun Logger.warn(body: () -> String): Unit = log(LogManager.WARNING, body)
|
||||
public fun Logger.error(body: () -> String): Unit = log(LogManager.ERROR, body)
|
||||
|
||||
internal val (() -> String).safe: String
|
||||
get() = try {
|
||||
invoke()
|
||||
} catch (t: Throwable) {
|
||||
"Error while evaluating log string: ${t.message}"
|
||||
}
|
||||
|
||||
|
||||
public fun Logger.error(throwable: Throwable?, body: () -> String): Unit = log(LogManager.ERROR) {
|
||||
buildString {
|
||||
appendLine(body())
|
||||
throwable?.let { appendLine(throwable.stackTraceToString()) }
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public class DefaultLogManager : AbstractPlugin(), LogManager {
|
||||
|
||||
override fun logger(name: Name): Logger = Logger { tag, body ->
|
||||
val message: String = body.safe
|
||||
println("$tag $name: [${context.name}] $message")
|
||||
}
|
||||
|
||||
override val defaultLogger: Logger = logger(Name.EMPTY)
|
||||
|
||||
|
||||
override val tag: PluginTag get() = Companion.tag
|
||||
|
||||
public companion object : PluginFactory<DefaultLogManager> {
|
||||
override fun build(context: Context, meta: Meta): DefaultLogManager = DefaultLogManager()
|
||||
|
||||
override val tag: PluginTag = PluginTag(group = PluginTag.DATAFORGE_GROUP, name = "log.default")
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Context log manager inherited from parent
|
||||
*/
|
||||
public val Context.logger: LogManager
|
||||
get() = plugins.find(inherit = true) { it is LogManager } as? LogManager
|
||||
?: getGlobalLoggerFactory().build(context = Global, meta = Meta.EMPTY).apply { attach(Global) }
|
||||
|
||||
/**
|
||||
* The named proxy logger for a context member
|
||||
*/
|
||||
public val ContextAware.logger: Logger
|
||||
get() = if (this is Named) {
|
||||
Logger { tag, body ->
|
||||
context.logger.log(this@logger.name + name, tag, body)
|
||||
}
|
||||
} else {
|
||||
context.logger
|
||||
}
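A sketch of how the logging helpers above are used from a ContextAware member; the class name and messages are illustrative assumptions:
class Worker(override val context: Context) : ContextAware {
    fun run() {
        // Uses the proxy logger defined above (prefixes messages with the member name if Named)
        logger.info { "Started in context ${context.name}" }
        logger.error(IllegalStateException("boom")) { "Demo failure" }
    }
}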
|
||||
|
@ -1,80 +1,68 @@
|
||||
package hep.dataforge.context
|
||||
package space.kscience.dataforge.context
|
||||
|
||||
import hep.dataforge.meta.Meta
|
||||
import hep.dataforge.meta.MetaRepr
|
||||
import hep.dataforge.meta.buildMeta
|
||||
import hep.dataforge.names.Name
|
||||
import hep.dataforge.names.toName
|
||||
import hep.dataforge.provider.Provider
|
||||
import space.kscience.dataforge.context.Plugin.Companion.TARGET
|
||||
import space.kscience.dataforge.meta.Meta
|
||||
import space.kscience.dataforge.meta.MetaRepr
|
||||
import space.kscience.dataforge.misc.DfId
|
||||
import space.kscience.dataforge.misc.Named
|
||||
import space.kscience.dataforge.names.Name
|
||||
import space.kscience.dataforge.names.parseAsName
|
||||
import space.kscience.dataforge.provider.Provider
|
||||
|
||||
/**
|
||||
* The interface to define a Context plugin. A plugin stores all runtime features of a context.
|
||||
* The plugin is by default configurable and a Provider (both features could be ignored).
|
||||
* The plugin must in most cases have an empty constructor so that it can be loaded from a library.
|
||||
*
|
||||
*
|
||||
* The plugin lifecycle is the following:
|
||||
*
|
||||
*
|
||||
* create - configure - attach - detach - destroy
|
||||
*
|
||||
*
|
||||
* Configuration of an attached plugin is possible for a context which is not in runtime mode, but it is not recommended.
|
||||
*
|
||||
* @author Alexander Nozik
|
||||
*/
|
||||
interface Plugin : Named, ContextAware, Provider, MetaRepr {
|
||||
@DfId(TARGET)
|
||||
public interface Plugin : Named, ContextAware, Provider, MetaRepr {
|
||||
|
||||
/**
|
||||
* Get tag for this plugin
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
val tag: PluginTag
|
||||
public val tag: PluginTag
|
||||
|
||||
val meta: Meta
|
||||
public val meta: Meta
|
||||
|
||||
/**
|
||||
* The name of this plugin ignoring version and group
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
override val name: Name get() = tag.name.toName()
|
||||
override val name: Name get() = tag.name.parseAsName()
|
||||
|
||||
/**
|
||||
* Plugin dependencies which are required to attach this plugin. Plugin
|
||||
* dependencies must be initialized and enabled in the Context before this
|
||||
* plugin is enabled.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
fun dependsOn(): Collection<PluginFactory<*>>
|
||||
public fun dependsOn(): Map<PluginFactory<*>, Meta>
|
||||
|
||||
/**
|
||||
* Start this plugin and attach registration info to the context. This method
|
||||
* should be called only via PluginManager to avoid dependency issues.
|
||||
*
|
||||
* @param context
|
||||
*/
|
||||
fun attach(context: Context)
|
||||
public fun attach(context: Context)
|
||||
|
||||
/**
|
||||
* Stop this plugin and remove registration info from context and other
|
||||
* plugins. This method should be called only via PluginManager to avoid
|
||||
* dependency issues.
|
||||
*/
|
||||
fun detach()
|
||||
public fun detach()
|
||||
|
||||
override fun toMeta(): Meta = buildMeta {
|
||||
public val isAttached: Boolean
|
||||
|
||||
override fun toMeta(): Meta = Meta {
|
||||
"context" put context.name.toString()
|
||||
"type" to this::class.simpleName
|
||||
"tag" put tag
|
||||
"meta" put meta
|
||||
}
|
||||
|
||||
companion object {
|
||||
|
||||
const val PLUGIN_TARGET = "plugin"
|
||||
public companion object {
|
||||
public const val TARGET: String = "plugin"
|
||||
}
|
||||
|
||||
}
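For illustration, an attached plugin can be looked up by tag and inspected through the members declared above; DefaultLogManager stands in for any loaded plugin:
fun inspect(context: Context) {
    val plugin: Plugin = context.plugins[DefaultLogManager.tag]
        ?: error("Log manager is not loaded in ${context.name}")
    check(plugin.isAttached)   // true once the PluginManager has attached it
    println(plugin.toMeta())   // contains "context", "type", "tag" and "meta" nodes
}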
|
@ -0,0 +1,57 @@
|
||||
package space.kscience.dataforge.context
|
||||
|
||||
import space.kscience.dataforge.meta.Meta
|
||||
import space.kscience.dataforge.misc.Named
|
||||
import space.kscience.dataforge.names.Name
|
||||
|
||||
/**
|
||||
* A convenience factory to build simple plugins
|
||||
*/
|
||||
public class PluginBuilder(
|
||||
name: String,
|
||||
group: String = "",
|
||||
version: String = "",
|
||||
) {
|
||||
public val tag: PluginTag = PluginTag(name, group, version)
|
||||
|
||||
private val content = HashMap<String, MutableMap<Name, Any>>()
|
||||
private val dependencies = HashMap<PluginFactory<*>, Meta>()
|
||||
|
||||
public fun requires(
|
||||
factory: PluginFactory<*>,
|
||||
meta: Meta = Meta.EMPTY,
|
||||
) {
|
||||
dependencies[factory] = meta
|
||||
}
|
||||
|
||||
public fun provides(target: String, items: Map<Name, Any>) {
|
||||
content.getOrPut(target) { HashMap() }.putAll(items)
|
||||
}
|
||||
|
||||
public fun provides(target: String, vararg items: Named) {
|
||||
provides(target, items.associateBy { it.name })
|
||||
}
|
||||
|
||||
public fun build(): PluginFactory<*> {
|
||||
|
||||
return object : PluginFactory<Plugin> {
|
||||
override val tag: PluginTag get() = this@PluginBuilder.tag
|
||||
|
||||
override fun build(context: Context, meta: Meta): Plugin = object : AbstractPlugin() {
|
||||
override val tag: PluginTag get() = this@PluginBuilder.tag
|
||||
|
||||
override fun content(target: String): Map<Name, Any> = this@PluginBuilder.content[target] ?: emptyMap()
|
||||
|
||||
override fun dependsOn(): Map<PluginFactory<*>, Meta> = this@PluginBuilder.dependencies
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public fun PluginFactory(
|
||||
name: String,
|
||||
group: String = "",
|
||||
version: String = "",
|
||||
block: PluginBuilder.() -> Unit,
|
||||
): PluginFactory<*> = PluginBuilder(name, group, version).apply(block).build()
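A sketch of the builder function above; the factory name, target, provided item and dependency are illustrative assumptions:
val demoFactory: PluginFactory<*> = PluginFactory(name = "demo.provider", group = PluginTag.DATAFORGE_GROUP) {
    // Declare a dependency that will be loaded together with this plugin
    requires(DefaultLogManager)
    // Expose a single named item under a custom provider target
    provides("demo.target", mapOf("answer".asName() to 42))
}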
|
@ -0,0 +1,21 @@
|
||||
package space.kscience.dataforge.context
|
||||
|
||||
import space.kscience.dataforge.meta.Meta
|
||||
import space.kscience.dataforge.misc.DfId
|
||||
|
||||
@DfId(PluginFactory.TYPE)
|
||||
public interface PluginFactory<T : Plugin> : Factory<T> {
|
||||
public val tag: PluginTag
|
||||
|
||||
public companion object {
|
||||
public const val TYPE: String = "pluginFactory"
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A plugin factory created for a specific, already existing plugin instance
|
||||
*/
|
||||
internal class DeFactoPluginFactory<T : Plugin>(val plugin: T) : PluginFactory<T> {
|
||||
override fun build(context: Context, meta: Meta): T = plugin
|
||||
override val tag: PluginTag get() = plugin.tag
|
||||
}
|
@ -0,0 +1,100 @@
|
||||
package space.kscience.dataforge.context
|
||||
|
||||
import space.kscience.dataforge.meta.Meta
|
||||
import space.kscience.dataforge.misc.DFInternal
|
||||
import space.kscience.dataforge.names.plus
|
||||
import kotlin.reflect.KClass
|
||||
import kotlin.reflect.cast
|
||||
|
||||
|
||||
/**
|
||||
* The manager for the plugin system. It should monitor plugin dependencies and locks.
|
||||
*
|
||||
* @property context A context for this plugin manager
|
||||
* @author Alexander Nozik
|
||||
*/
|
||||
public class PluginManager internal constructor(
|
||||
override val context: Context,
|
||||
private val plugins: Set<Plugin>,
|
||||
) : ContextAware, Iterable<Plugin> {
|
||||
|
||||
init {
|
||||
plugins.forEach { it.attach(context) }
|
||||
}
|
||||
|
||||
/**
|
||||
* A [PluginManager] of parent context if it is present
|
||||
*/
|
||||
private val parent: PluginManager? = context.parent?.plugins
|
||||
|
||||
/**
|
||||
* List plugins stored in this [PluginManager]. If [inherit] is true, include parent plugins as well
|
||||
*/
|
||||
public fun list(inherit: Boolean): Collection<Plugin> {
|
||||
return if (inherit && parent != null) {
|
||||
plugins + parent.list(true)
|
||||
} else {
|
||||
plugins
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get an existing plugin or return null if it is not present. Only the first matching plugin is returned.
|
||||
* @param inherit search for parent [PluginManager] plugins
|
||||
* @param predicate condition for the plugin
|
||||
*/
|
||||
public fun find(inherit: Boolean = true, predicate: (Plugin) -> Boolean): Plugin? =
|
||||
list(inherit).find(predicate)
|
||||
|
||||
/**
|
||||
* Find a loaded plugin via its tag
|
||||
*
|
||||
* @param tag
|
||||
* @return
|
||||
*/
|
||||
public operator fun get(tag: PluginTag, inherit: Boolean = true): Plugin? =
|
||||
find(inherit) { tag.matches(it.tag) }
|
||||
|
||||
/**
|
||||
* Find a loaded plugin via its class. This method does not check if the result is unique and just returns the first
* plugin matching the class condition.
* For a safe search, provide a tag, since tags are checked on load and plugins with the same tag are not allowed
* in the same context.
|
||||
*
|
||||
* @param tag
|
||||
* @param type
|
||||
* @param <T>
|
||||
* @return
|
||||
*/
|
||||
@DFInternal
|
||||
public fun <T : Any> getByType(type: KClass<T>, tag: PluginTag? = null, inherit: Boolean = true): T? =
|
||||
find(inherit) { type.isInstance(it) && (tag == null || tag.matches(it.tag)) }?.let { type.cast(it) }
|
||||
|
||||
@OptIn(DFInternal::class)
|
||||
public inline operator fun <reified T : Any> get(tag: PluginTag? = null, recursive: Boolean = true): T? =
|
||||
getByType(T::class, tag, recursive)
|
||||
|
||||
@OptIn(DFInternal::class)
|
||||
public inline operator fun <reified T : Plugin> get(factory: PluginFactory<T>, recursive: Boolean = true): T? =
|
||||
getByType(T::class, factory.tag, recursive)
|
||||
|
||||
override fun iterator(): Iterator<Plugin> = plugins.iterator()
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch a plugin with given meta from the context. If the plugin (with given meta) is already registered, it is returned.
* Otherwise, a new child context with the plugin is created. In the latter case the owning context can be retrieved from the plugin.
|
||||
*/
|
||||
public inline fun <reified T : Plugin> Context.request(factory: PluginFactory<T>, meta: Meta = Meta.EMPTY): T {
|
||||
val existing = plugins[factory]
|
||||
return if (existing != null && existing.meta == meta) existing
|
||||
else {
|
||||
buildContext(name = this@request.name + factory.tag.name) {
|
||||
plugin(factory, meta)
|
||||
}.plugins[factory]!!
|
||||
}
|
||||
}
|
||||
|
||||
@Deprecated("Replace with request", ReplaceWith("request(factory, meta)"))
|
||||
public inline fun <reified T : Plugin> Context.fetch(factory: PluginFactory<T>, meta: Meta = Meta.EMPTY): T =
|
||||
request(factory, meta)
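A sketch of the request helper above; DefaultLogManager stands in for any plugin factory:
val ctx = Context("request-demo")
// Returns the existing plugin, or forks a child context that owns a freshly built one
val logManager: DefaultLogManager = ctx.request(DefaultLogManager)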
|
@@ -1,8 +1,8 @@
package hep.dataforge.context
package space.kscience.dataforge.context

import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaRepr
import hep.dataforge.meta.buildMeta
import kotlinx.serialization.Serializable
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.MetaRepr

/**
 * The tag which contains information about name, group and version of some
@@ -10,7 +10,8 @@ import hep.dataforge.meta.buildMeta
 *
 * @author Alexander Nozik
 */
data class PluginTag(
@Serializable
public data class PluginTag(
    val name: String,
    val group: String = "",
    val version: String = ""
@@ -22,7 +23,7 @@ data class PluginTag(
     * @param otherTag
     * @return
     */
    fun matches(otherTag: PluginTag): Boolean {
    public fun matches(otherTag: PluginTag): Boolean {
        return matchesName(otherTag) && matchesGroup(otherTag)
    }

@@ -36,15 +37,15 @@ data class PluginTag(

    override fun toString(): String = listOf(group, name, version).joinToString(separator = ":")

    override fun toMeta(): Meta = buildMeta {
    override fun toMeta(): Meta = Meta {
        "name" put name
        "group" put group
        "version" put version
    }

    companion object {
    public companion object {

        const val DATAFORGE_GROUP = "hep.dataforge"
        public const val DATAFORGE_GROUP: String = "dataforge"

        /**
         * Build new PluginTag from standard string representation
@@ -52,7 +53,7 @@ data class PluginTag(
         * @param tag
         * @return
         */
        fun fromString(tag: String): PluginTag {
        public fun fromString(tag: String): PluginTag {
            val sepIndex = tag.indexOf(":")
            return if (sepIndex >= 0) {
                PluginTag(group = tag.substring(0, sepIndex), name = tag.substring(sepIndex + 1))
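An illustrative sketch of the `group:name` string representation handled by `fromString` above; the values are hypothetical, and the no-separator branch is inferred from the (truncated) function body.

```kotlin
val tagged = PluginTag.fromString("space.kscience:myPlugin")
// tagged.group == "space.kscience", tagged.name == "myPlugin"

// Without a ':' separator the whole string presumably becomes the name and the group stays empty.
val bare = PluginTag.fromString("myPlugin")
```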
@@ -0,0 +1,97 @@
package space.kscience.dataforge.context

import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.plus
import space.kscience.dataforge.provider.Provider
import space.kscience.dataforge.provider.top
import kotlin.reflect.KClass
import kotlin.reflect.cast

/**
 * Resolve a specific element in the top-level elements of the provider and attempt to cast it to the given type
 */
private fun <T : Any> Provider.provide(target: String, name: Name, type: KClass<out T>): T? {
    return content(target)[name]?.let { type.cast(it) }
}

/**
 * Resolve a top level object with given [target] and [name] in a [Context] own scope or its plugins.
 */
public fun <T : Any> Context.resolve(target: String, name: Name, type: KClass<out T>): T? {
    //Try searching for a plugin or a context property first
    provide(target, name, type)?.let { return it }
    val pluginContent = plugins.mapNotNull { it.provide(target, name, type) }
    return if (pluginContent.isEmpty()) {
        parent?.resolve<T>(target, name, type)
    } else {
        pluginContent.single() // throws error in case of name/type conflicts
    }
}

/**
 * Resolve a top level object with given [target] and [name] in a [Context] own scope or its plugins.
 */
public inline fun <reified T : Any> Context.resolve(target: String, name: Name): T? =
    resolve(target, name, T::class)

/**
 * Gather a map of all top-level objects with the given [target] from context plugins.
 * Content from plugins is prefixed by the plugin name, so name conflicts are impossible.
 * This operation could be slow if there is a large number of plugins.
 */
public fun <T : Any> Context.gather(
    target: String,
    type: KClass<out T>,
    inherit: Boolean = true,
): Map<Name, T> = buildMap {
    putAll(top(target, type))
    plugins.forEach { plugin ->
        plugin.top(target, type).forEach { (name, value) ->
            val itemName = plugin.name + name
            if (containsKey(itemName)) error("Name conflict during gather. An item with name $name could not be gathered from $plugin because key is already present.")
            put(itemName, value)
        }
    }
    if (inherit) {
        parent?.gather(target, type, inherit)?.forEach {
            //put all values from parent if they are not conflicting
            if (!containsKey(it.key)) {
                put(it.key, it.value)
            }
        }
    }
}

public inline fun <reified T : Any> Context.gather(target: String, inherit: Boolean = true): Map<Name, T> =
    gather(target, T::class, inherit)

/**
 * Gather all content from the context itself and its plugins as a sequence of name-value pairs. Ignores name conflicts.
 *
 * Adds the parent context sequence as well if [inherit] is true.
 */
@DFExperimental
public fun <T : Any> Context.gatherInSequence(
    target: String,
    type: KClass<out T>,
    inherit: Boolean = true,
): Sequence<Map.Entry<Name, T>> = sequence {
    yieldAll(top(target, type).entries)
    plugins.forEach { plugin ->
        yieldAll(plugin.top(target, type).mapKeys { plugin.name + it.key }.entries)
    }
    if (inherit) {
        parent?.gather(target, type, inherit)?.let {
            yieldAll(it.entries)
        }
    }
}

@DFExperimental
public inline fun <reified T : Any> Context.gatherInSequence(
    target: String,
    inherit: Boolean = true,
): Sequence<Map.Entry<Name, T>> = gatherInSequence(target, T::class, inherit)

public val <T> Sequence<Map.Entry<Name, T>>.values: Sequence<T> get() = map { it.value }
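A sketch of how `gather`, `gatherInSequence` and the `values` helper compose, assuming some plugin provides content under a hypothetical "widget" target; imports are the same as in the file above.

```kotlin
@OptIn(DFExperimental::class)
fun listWidgets(context: Context) {
    // Map of plugin-name-prefixed names to provided objects, including parent content.
    val byName: Map<Name, Any> = context.gather("widget", inherit = true)

    // Lazy variant that ignores name conflicts; `values` drops the names.
    val all: Sequence<Any> = context.gatherInSequence<Any>("widget").values

    println("${byName.size} named widgets, ${all.count()} total")
}
```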
@@ -0,0 +1,35 @@
package space.kscience.dataforge.properties


import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.ObservableMutableMeta
import space.kscience.dataforge.meta.transformations.MetaConverter
import space.kscience.dataforge.meta.transformations.nullableMetaToObject
import space.kscience.dataforge.meta.transformations.nullableObjectToMeta
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.startsWith

@DFExperimental
public class MetaProperty<T : Any>(
    public val meta: ObservableMutableMeta,
    public val name: Name,
    public val converter: MetaConverter<T>,
) : Property<T?> {

    override var value: T?
        get() = converter.nullableMetaToObject(meta[name])
        set(value) {
            meta[name] = converter.nullableObjectToMeta(value) ?: Meta.EMPTY
        }

    override fun onChange(owner: Any?, callback: (T?) -> Unit) {
        meta.onChange(owner) { name ->
            if (name.startsWith(this@MetaProperty.name)) callback(converter.nullableMetaToObject(this[name]))
        }
    }

    override fun removeChangeListener(owner: Any?) {
        meta.removeListener(owner)
    }
}
@@ -0,0 +1,47 @@
package space.kscience.dataforge.properties

import kotlinx.coroutines.ExperimentalCoroutinesApi
import kotlinx.coroutines.flow.MutableStateFlow
import kotlinx.coroutines.flow.StateFlow
import space.kscience.dataforge.misc.DFExperimental

@DFExperimental
public interface Property<T> {
    public var value: T

    public fun onChange(owner: Any? = null, callback: (T) -> Unit)
    public fun removeChangeListener(owner: Any? = null)
}

@DFExperimental
@OptIn(ExperimentalCoroutinesApi::class)
public fun <T> Property<T>.toFlow(): StateFlow<T> = MutableStateFlow(value).also { stateFlow ->
    onChange {
        stateFlow.value = it
    }
}

/**
 * Reflect all changes in the [source] property onto this property. Does not reflect changes back.
 *
 * @return a mirroring job
 */
@DFExperimental
public fun <T> Property<T>.mirror(source: Property<T>) {
    source.onChange(this) {
        this.value = it
    }
}

/**
 * Bi-directional connection between properties
 */
@DFExperimental
public fun <T> Property<T>.bind(other: Property<T>) {
    onChange(other) {
        other.value = it
    }
    other.onChange {
        this.value = it
    }
}
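A small sketch showing how these combinators behave. `SimpleProperty` is a hypothetical in-memory implementation added only for illustration; note that it guards its setter so that the bi-directional `bind` above does not loop.

```kotlin
// Minimal in-memory Property implementation, for illustration only.
@OptIn(DFExperimental::class)
class SimpleProperty<T>(initial: T) : Property<T> {
    private val listeners = mutableMapOf<Any?, (T) -> Unit>()

    override var value: T = initial
        set(newValue) {
            if (field != newValue) { // guard against ping-pong when bound bi-directionally
                field = newValue
                listeners.values.toList().forEach { it(newValue) }
            }
        }

    override fun onChange(owner: Any?, callback: (T) -> Unit) {
        listeners[owner] = callback
    }

    override fun removeChangeListener(owner: Any?) {
        listeners.remove(owner)
    }
}

@OptIn(DFExperimental::class)
fun propertyDemo() {
    val a = SimpleProperty(1)
    val b = SimpleProperty(0)
    a.bind(b)      // changes now propagate in both directions
    a.value = 42
    check(b.value == 42)
}
```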
@@ -0,0 +1,31 @@
package space.kscience.dataforge.properties


import space.kscience.dataforge.meta.Scheme
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.parseAsName
import space.kscience.dataforge.names.startsWith
import kotlin.reflect.KMutableProperty1

@DFExperimental
public fun <S : Scheme, T : Any> S.property(property: KMutableProperty1<S, T?>): Property<T?> =
    object : Property<T?> {
        override var value: T?
            get() = property.get(this@property)
            set(value) {
                property.set(this@property, value)
            }

        override fun onChange(owner: Any?, callback: (T?) -> Unit) {
            this@property.meta.onChange(this) { name ->
                if (name.startsWith(property.name.parseAsName(true))) {
                    callback(property.get(this@property))
                }
            }
        }

        override fun removeChangeListener(owner: Any?) {
            this@property.meta.removeListener(this@property)
        }
    }
@@ -0,0 +1,92 @@
/*
 * Copyright 2015 Alexander Nozik.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package space.kscience.dataforge.provider

import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.parseAsName
import kotlin.jvm.JvmInline

/**
 * Path interface.
 *
 */
@JvmInline
public value class Path(public val tokens: List<PathToken>) : Iterable<PathToken> {

    override fun iterator(): Iterator<PathToken> = tokens.iterator()

    override fun toString(): String = tokens.joinToString(separator = PATH_SEGMENT_SEPARATOR)

    public companion object {
        public const val PATH_SEGMENT_SEPARATOR: String = "/"

        public fun parse(path: String): Path = Path(path.split(PATH_SEGMENT_SEPARATOR).map { PathToken.parse(it) })
    }
}

public val Path.length: Int get() = tokens.size

public val Path.head: PathToken? get() = tokens.firstOrNull()


/**
 * Returns the path without its first segment, or null if the path is empty.
 */
public val Path.tail: Path? get() = if (tokens.isEmpty()) null else Path(tokens.drop(1))


public operator fun Path.plus(path: Path): Path = Path(this.tokens + path.tokens)

public data class PathToken(val name: Name, val target: String? = null) {
    override fun toString(): String = if (target == null) {
        name.toString()
    } else {
        "$target$TARGET_SEPARATOR$name"
    }

    public companion object {
        public const val TARGET_SEPARATOR: String = "::"

        public fun parse(token: String, cache: Boolean = false): PathToken {
            val target = token.substringBefore(TARGET_SEPARATOR, "")
            val name = token.substringAfter(TARGET_SEPARATOR).parseAsName(cache)
            if (target.contains("[")) TODO("target separators in queries are not supported")
            return PathToken(name, target)
        }
    }
}

/**
 * Represent this path token as full path
 */
public fun PathToken.asPath(): Path = Path(listOf(this))

/**
 * Represent a name with optional [target] as a [Path]
 */
public fun Name.asPath(target: String? = null): Path = PathToken(this, target).asPath()

/**
 * Build a path from given names using default targets
 */
public fun Path(vararg names: Name): Path = Path(names.map { PathToken(it) })

/**
 * Use an array of [Name]-target pairs to construct segmented [Path]
 */
public fun Path(vararg tokens: Pair<Name, String?>): Path = Path(tokens.map { PathToken(it.first, it.second) })
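An illustrative sketch of the parsing rules above (segment separator `/`, target separator `::`); the example strings are hypothetical.

```kotlin
// "a.b/c.d" splits into two tokens on '/'; each token's name is parsed from the dot-separated part.
val path = Path.parse("a.b/c.d")
check(path.length == 2)

// A token may carry an explicit target before "::".
val token = PathToken.parse("plugin::my.name")
check(token.target == "plugin")

// head/tail walk the chain segment by segment.
val rest: Path? = path.tail
println("${path.head} -> $rest")
```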
@@ -13,44 +13,40 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package hep.dataforge.provider
package space.kscience.dataforge.provider

import hep.dataforge.names.Name
import hep.dataforge.names.toName
import space.kscience.dataforge.names.Name
import kotlin.reflect.KClass
import kotlin.reflect.safeCast

/**
 * A marker utility interface for providers.
 *
 * @author Alexander Nozik
 */
interface Provider {
public interface Provider {

    /**
     * Default target for this provider
     *
     * @return
     */
    val defaultTarget: String get() = ""
    public val defaultTarget: String get() = ""

    /**
     * Default target for next chain segment
     *
     * @return
     */
    val defaultChainTarget: String get() = ""

    public val defaultChainTarget: String get() = ""

    /**
     * A map of direct children for specific target
     */
    fun provideTop(target: String): Map<Name, Any>
    public fun content(target: String): Map<Name, Any> = emptyMap()
}

fun Provider.provide(path: Path, targetOverride: String? = null): Any? {
public fun Provider.provide(path: Path, targetOverride: String? = null): Any? {
    if (path.length == 0) throw IllegalArgumentException("Can't provide by empty path")
    val first = path.first()
    val target = targetOverride ?: first.target ?: defaultTarget
    val res = provideTop(target)[first.name] ?: return null
    val res = content(target)[first.name] ?: return null
    return when (path.length) {
        1 -> res
        else -> {
@@ -65,24 +61,27 @@ fun Provider.provide(path: Path, targetOverride: String? = null): Any? {
/**
 * Type checked provide
 */
inline fun <reified T : Any> Provider.provide(path: String): T? {
    return provide(Path.parse(path)) as? T
public inline fun <reified T : Any> Provider.provide(path: String, targetOverride: String? = null): T? {
    return provide(Path.parse(path), targetOverride) as? T
}
//
//inline fun <reified T : Any> Provider.provide(target: String, name: Name): T? {
//    return provide(PathToken(name, target).toPath()) as? T
//}

inline fun <reified T : Any> Provider.provide(target: String, name: Name): T? {
    return provide(PathToken(name, target).toPath()) as? T
}

inline fun <reified T : Any> Provider.provide(target: String, name: String): T? =
    provide(target, name.toName())
//inline fun <reified T : Any> Provider.provide(target: String, name: String): T? =
//    provide(target, name.toName())

/**
 * Typed top level content
 */
inline fun <reified T : Any> Provider.top(target: String): Map<Name, T> {
    return provideTop(target).mapValues {
        it.value as? T ?: error("The type of element $it is ${it::class} but ${T::class} is expected")
    }
public fun <T : Any> Provider.top(target: String, type: KClass<out T>): Map<Name, T> = content(target).mapValues {
    type.safeCast(it.value) ?: error("The type of element ${it.value} is ${it.value::class} but $type is expected")
}

/**
 * Typed top level content
 */
public inline fun <reified T : Any> Provider.top(target: String): Map<Name, T> = top(target, T::class)

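A sketch of implementing the reworked `Provider.content` contract and querying it with `top` and the path-based `provide`. `CatalogProvider`, the "entry" target and its values are hypothetical.

```kotlin
class CatalogProvider : Provider {
    override val defaultTarget: String get() = "entry"

    // Expose two string entries under the "entry" target.
    override fun content(target: String): Map<Name, Any> = when (target) {
        "entry" -> mapOf(
            Name.parse("alpha") to "first",
            Name.parse("beta") to "second",
        )
        else -> emptyMap()
    }
}

fun catalogDemo() {
    val provider = CatalogProvider()
    val entries: Map<Name, String> = provider.top("entry") // type-checked view of one target
    val single: String? = provider.provide("entry::alpha") // path lookup with an explicit target
    println("${entries.size} entries, first = $single")
}
```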
@@ -1,32 +0,0 @@
package hep.dataforge.context

import hep.dataforge.names.Name
import hep.dataforge.names.appendLeft
import hep.dataforge.names.toName
import kotlin.test.Test
import kotlin.test.assertEquals


class ContextTest {
    class DummyPlugin : AbstractPlugin() {
        override val tag get() = PluginTag("test")

        override fun provideTop(target: String): Map<Name, Any> {
            return when (target) {
                "test" -> listOf("a", "b", "c.d").associate { it.toName() to it.toName() }
                else -> emptyMap()
            }
        }
    }

    @Test
    fun testPluginManager() {
        Global.plugins.load(DummyPlugin())
        val members = Global.content<Name>("test")
        assertEquals(3, members.count())
        members.forEach {
            assertEquals(it.key, it.value.appendLeft("test"))
        }
    }

}
@@ -0,0 +1,33 @@
package space.kscience.dataforge.context

import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.appendFirst
import kotlin.test.Test
import kotlin.test.assertEquals


class ContextTest {
    class DummyPlugin : AbstractPlugin() {
        override val tag get() = PluginTag("test")

        override fun content(target: String): Map<Name, Any> {
            return when (target) {
                "test" -> listOf("a", "b", "c.d").associate { Name.parse(it) to Name.parse(it) }
                else -> emptyMap()
            }
        }
    }

    @Test
    fun testPluginManager() {
        val context = Context("test") {
            plugin(DummyPlugin())
        }
        val members = context.gather<Name>("test")
        assertEquals(3, members.count())
        members.forEach {
            assertEquals(it.key, it.value.appendFirst("test"))
        }
    }

}
@@ -0,0 +1,28 @@
package space.kscience.dataforge.properties

import space.kscience.dataforge.meta.Scheme
import space.kscience.dataforge.meta.SchemeSpec
import space.kscience.dataforge.meta.int
import space.kscience.dataforge.misc.DFExperimental
import kotlin.test.Test
import kotlin.test.assertEquals

internal class TestScheme : Scheme() {
    var a by int()
    var b by int()
    companion object : SchemeSpec<TestScheme>(::TestScheme)
}

@DFExperimental
class MetaPropertiesTest {
    @Test
    fun testBinding() {
        val scheme = TestScheme.empty()
        val a = scheme.property(TestScheme::a)
        val b = scheme.property(TestScheme::b)
        a.bind(b)
        scheme.a = 2
        assertEquals(2, scheme.b)
        assertEquals(2, b.value)
    }
}
@@ -0,0 +1,14 @@
package space.kscience.dataforge.provider

import kotlin.test.Test
import kotlin.test.assertEquals

class PathTest {
    @Test
    fun testParse() {
        val nameString = "a.b.c.d"
        val pathString = "a.b/c.d"
        assertEquals(1, Path.parse(nameString).length)
        assertEquals(2, Path.parse(pathString).length)
    }
}
@@ -1,16 +0,0 @@
package hep.dataforge.context


actual object PluginRepository {

    private val factories: MutableSet<PluginFactory<*>> = HashSet()

    actual fun register(factory: PluginFactory<*>) {
        factories.add(factory)
    }

    /**
     * List plugins available in the repository
     */
    actual fun list(): Sequence<PluginFactory<*>> = factories.asSequence()
}
@@ -0,0 +1,32 @@
package space.kscience.dataforge.context

import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.names.Name

public class ConsoleLogManager : AbstractPlugin(), LogManager {

    override fun logger(name: Name): Logger = Logger { tag, body ->
        val message: String = body.safe
        when (tag) {
            // TODO depends on https://youtrack.jetbrains.com/issue/KT-33595/
            LogManager.DEBUG -> console.asDynamic().debug("[${context.name}] $name: $message")
            LogManager.INFO -> console.info("[${context.name}] $name: $message")
            LogManager.WARNING -> console.warn("[${context.name}] $name: $message")
            LogManager.ERROR -> console.error("[${context.name}] $name: $message")
            else -> console.log("[${context.name}] $name: $message")
        }
    }

    override val defaultLogger: Logger = logger(Name.EMPTY)


    override val tag: PluginTag get() = Companion.tag

    public companion object : PluginFactory<ConsoleLogManager> {
        override fun build(context: Context, meta: Meta): ConsoleLogManager = ConsoleLogManager()

        override val tag: PluginTag = PluginTag(group = PluginTag.DATAFORGE_GROUP, name = "log.jsConsole")
    }
}

internal actual fun getGlobalLoggerFactory(): PluginFactory<out LogManager> = ConsoleLogManager
@@ -0,0 +1,32 @@
package space.kscience.dataforge.properties

import org.w3c.dom.HTMLInputElement
import space.kscience.dataforge.misc.DFExperimental

@DFExperimental
public fun HTMLInputElement.bindValue(property: Property<String>) {
    if (this.onchange != null) error("Input element already bound")
    this.onchange = {
        property.value = this.value
        Unit
    }
    property.onChange(this) {
        if (value != it) {
            value = it
        }
    }
}

@DFExperimental
public fun HTMLInputElement.bindChecked(property: Property<Boolean>) {
    if (this.onchange != null) error("Input element already bound")
    this.onchange = {
        property.value = this.checked
        Unit
    }
    property.onChange(this) {
        if (checked != it) {
            checked = it
        }
    }
}
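A hypothetical Kotlin/JS usage sketch for the bindings above: the element id and the origin of the property are assumptions, not part of this diff.

```kotlin
import kotlinx.browser.document
import org.w3c.dom.HTMLInputElement
import space.kscience.dataforge.misc.DFExperimental

@OptIn(DFExperimental::class)
fun bindDemo(enabled: Property<Boolean>) {
    // Hypothetical checkbox element; the property could come e.g. from a Scheme-backed property.
    val checkbox = document.getElementById("enabled") as HTMLInputElement
    checkbox.bindChecked(enabled)
    // Toggling the checkbox now updates the property, and property changes update the checkbox.
}
```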
@@ -1,17 +0,0 @@
package hep.dataforge.context

actual object PluginRepository {

    private val factories: MutableSet<PluginFactory<*>> = HashSet()

    actual fun register(factory: PluginFactory<*>) {
        factories.add(factory)
    }

    /**
     * List plugins available in the repository
     */
    actual fun list(): Sequence<PluginFactory<*>> =
        factories.asSequence() + Global.services()

}
@@ -1,44 +0,0 @@
package hep.dataforge.provider

import hep.dataforge.context.Context
import hep.dataforge.context.content
import hep.dataforge.names.Name
import kotlin.reflect.KClass
import kotlin.reflect.full.findAnnotation

/**
 *
 */
object Types {
    operator fun get(cl: KClass<*>): String {
        return cl.findAnnotation<Type>()?.id ?: cl.simpleName ?: ""
    }

    operator fun get(obj: Any): String {
        return get(obj::class)
    }
}

/**
 * Provide an object with given name inferring target from its type using [Type] annotation
 */
inline fun <reified T : Any> Provider.provideByType(name: String): T? {
    val target = Types[T::class]
    return provide(target, name)
}

inline fun <reified T : Any> Provider.provideByType(name: Name): T? {
    val target = Types[T::class]
    return provide(target, name)
}

inline fun <reified T : Any> Provider.top(): Map<Name, T> {
    val target = Types[T::class]
    return top(target)
}

/**
 * A sequences of all objects provided by plugins with given target and type
 */
inline fun <reified T : Any> Context.content(): Map<Name, T> = content<T>(Types[T::class])

@@ -13,30 +13,30 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package hep.dataforge.context
package space.kscience.dataforge.context

import java.util.*
import kotlin.reflect.KClass
import kotlin.reflect.full.cast

class ClassLoaderPlugin(val classLoader: ClassLoader) : AbstractPlugin() {
public class ClassLoaderPlugin(private val classLoader: ClassLoader) : AbstractPlugin() {
    override val tag: PluginTag = PluginTag("classLoader", PluginTag.DATAFORGE_GROUP)

    private val serviceCache: MutableMap<Class<*>, ServiceLoader<*>> = HashMap()

    fun <T : Any> services(type: KClass<T>): Sequence<T> {
    public fun <T : Any> services(type: KClass<T>): Sequence<T> {
        return serviceCache.getOrPut(type.java) { ServiceLoader.load(type.java, classLoader) }.asSequence()
            .map { type.cast(it) }
    }

    companion object {
        val DEFAULT = ClassLoaderPlugin(Global::class.java.classLoader)
    public companion object {
        public val DEFAULT: ClassLoaderPlugin = ClassLoaderPlugin(Global::class.java.classLoader)
    }
}

val Context.classLoaderPlugin get() = this.plugins.get() ?: ClassLoaderPlugin.DEFAULT
public val Context.classLoaderPlugin: ClassLoaderPlugin get() = this.plugins.get() ?: ClassLoaderPlugin.DEFAULT

inline fun <reified T : Any> Context.services() = classLoaderPlugin.services(T::class)
public inline fun <reified T : Any> Context.services(): Sequence<T> = classLoaderPlugin.services(T::class)


//open class JVMContext(
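A sketch of discovering JVM `ServiceLoader` implementations through this plugin; `MyService` and its `META-INF/services` registration are hypothetical.

```kotlin
// Hypothetical service interface; implementations would be listed in
// META-INF/services/com.example.MyService on the classpath.
interface MyService {
    fun describe(): String
}

fun listServices(context: Context) {
    // Falls back to ClassLoaderPlugin.DEFAULT when no ClassLoaderPlugin is registered in the context.
    val services: Sequence<MyService> = context.services()
    services.forEach { println(it.describe()) }
}
```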
@@ -0,0 +1,32 @@
package space.kscience.dataforge.context

import org.slf4j.LoggerFactory
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.names.Name

public class SlfLogManager : AbstractPlugin(), LogManager {

    override fun logger(name: Name): Logger = Logger { tag, body ->
        val logger = LoggerFactory.getLogger("[${context.name}] $name") //KotlinLogging.logger("[${context.name}] $name")
        val message = body.safe
        when (tag) {
            LogManager.DEBUG -> logger.debug(message)
            LogManager.INFO -> logger.info(message)
            LogManager.WARNING -> logger.warn(message)
            LogManager.ERROR -> logger.error(message)
            else -> logger.trace(message)
        }
    }

    override val defaultLogger: Logger = logger(Name.EMPTY)

    override val tag: PluginTag get() = Companion.tag

    public companion object : PluginFactory<SlfLogManager> {
        override fun build(context: Context, meta: Meta): SlfLogManager = SlfLogManager()

        override val tag: PluginTag = PluginTag(group = PluginTag.DATAFORGE_GROUP, name = "log.kotlinLogging")
    }
}

internal actual fun getGlobalLoggerFactory(): PluginFactory<out LogManager> = SlfLogManager
@ -0,0 +1,126 @@
|
||||
/*
|
||||
* Copyright 2018 Alexander Nozik.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package space.kscience.dataforge.descriptors
|
||||
|
||||
//@MustBeDocumented
|
||||
//annotation class Attribute(
|
||||
// val key: String,
|
||||
// val value: String
|
||||
//)
|
||||
//
|
||||
//@MustBeDocumented
|
||||
//annotation class Attributes(
|
||||
// val attrs: Array<Attribute>
|
||||
//)
|
||||
//
|
||||
//@MustBeDocumented
|
||||
//annotation class ItemDef(
|
||||
// val info: String = "",
|
||||
// val multiple: Boolean = false,
|
||||
// val required: Boolean = false
|
||||
//)
|
||||
//
|
||||
//@Target(AnnotationTarget.PROPERTY)
|
||||
//@MustBeDocumented
|
||||
//annotation class ValueDef(
|
||||
// val type: Array<ValueType> = [ValueType.STRING],
|
||||
// val def: String = "",
|
||||
// val allowed: Array<String> = [],
|
||||
// val enumeration: KClass<*> = Any::class
|
||||
//)
|
||||
|
||||
///**
|
||||
// * Description text for meta property, node or whole object
|
||||
// */
|
||||
//@Target(AnnotationTarget.CLASS, AnnotationTarget.PROPERTY)
|
||||
//@Retention(AnnotationRetention.RUNTIME)
|
||||
//@MustBeDocumented
|
||||
//annotation class Description(val value: String)
|
||||
//
|
||||
///**
|
||||
// * Annotation for value property which states that lists are expected
|
||||
// */
|
||||
//@Target(AnnotationTarget.PROPERTY)
|
||||
//@Retention(AnnotationRetention.RUNTIME)
|
||||
//@MustBeDocumented
|
||||
//annotation class Multiple
|
||||
//
|
||||
///**
|
||||
// * Descriptor target
|
||||
// * The DataForge path to the resource containing the description. Following targets are supported:
|
||||
// * 1. resource
|
||||
// * 1. file
|
||||
// * 1. class
|
||||
// * 1. method
|
||||
// * 1. property
|
||||
// *
|
||||
// *
|
||||
// * Does not work if [type] is provided
|
||||
// */
|
||||
//@Target(AnnotationTarget.CLASS, AnnotationTarget.PROPERTY, AnnotationTarget.VALUE_PARAMETER)
|
||||
//@Retention(AnnotationRetention.RUNTIME)
|
||||
//@MustBeDocumented
|
||||
//annotation class Descriptor(val value: String)
|
||||
//
|
||||
//
|
||||
///**
|
||||
// * Aggregator class for descriptor nodes
|
||||
// */
|
||||
//@Target(AnnotationTarget.CLASS, AnnotationTarget.PROPERTY, AnnotationTarget.FUNCTION, AnnotationTarget.VALUE_PARAMETER)
|
||||
//@Retention(AnnotationRetention.RUNTIME)
|
||||
//@MustBeDocumented
|
||||
//annotation class DescriptorNodes(vararg val nodes: NodeDef)
|
||||
//
|
||||
///**
|
||||
// * Aggregator class for descriptor values
|
||||
// */
|
||||
//@Target(AnnotationTarget.CLASS, AnnotationTarget.PROPERTY, AnnotationTarget.FUNCTION, AnnotationTarget.VALUE_PARAMETER)
|
||||
//@Retention(AnnotationRetention.RUNTIME)
|
||||
//@MustBeDocumented
|
||||
//annotation class DescriptorValues(vararg val nodes: ValueDef)
|
||||
//
|
||||
///**
|
||||
// * Alternative name for property descriptor declaration
|
||||
// */
|
||||
//@Target(AnnotationTarget.PROPERTY, AnnotationTarget.VALUE_PARAMETER)
|
||||
//@Retention(AnnotationRetention.RUNTIME)
|
||||
//@MustBeDocumented
|
||||
//annotation class DescriptorName(val name: String)
|
||||
//
|
||||
//@Target(AnnotationTarget.PROPERTY)
|
||||
//@Retention(AnnotationRetention.RUNTIME)
|
||||
//@MustBeDocumented
|
||||
//annotation class DescriptorValue(val def: ValueDef)
|
||||
////TODO enter fields directly?
|
||||
//
|
||||
//@Target(AnnotationTarget.PROPERTY)
|
||||
//@Retention(AnnotationRetention.RUNTIME)
|
||||
//@MustBeDocumented
|
||||
//annotation class ValueProperty(
|
||||
// val name: String = "",
|
||||
// val type: Array<ValueType> = arrayOf(ValueType.STRING),
|
||||
// val multiple: Boolean = false,
|
||||
// val def: String = "",
|
||||
// val enumeration: KClass<*> = Any::class,
|
||||
// val tags: Array<String> = emptyArray()
|
||||
//)
|
||||
//
|
||||
//
|
||||
//@Target(AnnotationTarget.PROPERTY)
|
||||
//@Retention(AnnotationRetention.RUNTIME)
|
||||
//@MustBeDocumented
|
||||
//annotation class NodeProperty(val name: String = "")
|
@ -0,0 +1,53 @@
|
||||
package space.kscience.dataforge.descriptors
|
||||
|
||||
|
||||
//inline fun <reified T : Scheme> T.buildDescriptor(): NodeDescriptor = NodeDescriptor {
|
||||
// T::class.apply {
|
||||
// findAnnotation<ItemDef>()?.let { def ->
|
||||
// info = def.info
|
||||
// required = def.required
|
||||
// multiple = def.multiple
|
||||
// }
|
||||
// findAnnotation<Attribute>()?.let { attr ->
|
||||
// attributes {
|
||||
// this[attr.key] = attr.value.parseValue()
|
||||
// }
|
||||
// }
|
||||
// findAnnotation<Attributes>()?.attrs?.forEach { attr ->
|
||||
// attributes {
|
||||
// this[attr.key] = attr.value.parseValue()
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// T::class.memberProperties.forEach { property ->
|
||||
// val delegate = property.getDelegate(this@buildDescriptor)
|
||||
//
|
||||
// val descriptor: ItemDescriptor = when (delegate) {
|
||||
// is ConfigurableDelegate -> buildPropertyDescriptor(property, delegate)
|
||||
// is ReadWriteDelegateWrapper<*, *> -> {
|
||||
// if (delegate.delegate is ConfigurableDelegate) {
|
||||
// buildPropertyDescriptor(property, delegate.delegate as ConfigurableDelegate)
|
||||
// } else {
|
||||
// return@forEach
|
||||
// }
|
||||
// }
|
||||
// else -> return@forEach
|
||||
// }
|
||||
// defineItem(property.name, descriptor)
|
||||
// }
|
||||
//}
|
||||
|
||||
//inline fun <T : Scheme, reified V : Any?> buildPropertyDescriptor(
|
||||
// property: KProperty1<T, V>,
|
||||
// delegate: ConfigurableDelegate
|
||||
//): ItemDescriptor {
|
||||
// when {
|
||||
// V::class.isSubclassOf(Scheme::class) -> NodeDescriptor {
|
||||
// default = delegate.default.node
|
||||
// }
|
||||
// V::class.isSubclassOf(Meta::class) -> NodeDescriptor {
|
||||
// default = delegate.default.node
|
||||
// }
|
||||
//
|
||||
// }
|
||||
//}
|
@@ -0,0 +1,49 @@
package space.kscience.dataforge.provider

import space.kscience.dataforge.context.Context
import space.kscience.dataforge.context.PluginBuilder
import space.kscience.dataforge.context.gather
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.misc.DfId
import space.kscience.dataforge.misc.Named
import space.kscience.dataforge.names.Name
import kotlin.reflect.KClass
import kotlin.reflect.full.findAnnotation


@DFExperimental
public val KClass<*>.dfId: String
    get() = findAnnotation<DfId>()?.id ?: simpleName ?: ""

/**
 * Provide an object with given name inferring target from its type using [DfId] annotation
 */
@DFExperimental
public inline fun <reified T : Any> Provider.provideByType(name: String): T? {
    val target = T::class.dfId
    return provide(target, name)
}

@DFExperimental
public inline fun <reified T : Any> Provider.top(): Map<Name, T> {
    val target = T::class.dfId
    return top(target)
}

/**
 * All objects provided by plugins with given target and type
 */
@DFExperimental
public inline fun <reified T : Any> Context.gather(inherit: Boolean = true): Map<Name, T> =
    gather<T>(T::class.dfId, inherit)


@DFExperimental
public inline fun <reified T : Any> PluginBuilder.provides(items: Map<Name, T>) {
    provides(T::class.dfId, items)
}

@DFExperimental
public inline fun <reified T : Any> PluginBuilder.provides(vararg items: Named) {
    provides(T::class.dfId, *items)
}
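A sketch of how the `dfId` helpers tie the `@DfId` annotation to provider targets; `Widget` and its id are hypothetical, and the annotation usage is an assumption based on the `findAnnotation<DfId>()?.id` lookup above.

```kotlin
// Hypothetical domain type: its DfId becomes the default provider target.
@DfId("widget")
interface Widget : Named

@OptIn(DFExperimental::class)
fun widgetDemo(context: Context) {
    // Equivalent to context.gather<Widget>("widget", inherit = true).
    val widgets: Map<Name, Widget> = context.gather()
    println("found ${widgets.size} widgets")
}
```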
@@ -0,0 +1,4 @@
package space.kscience.dataforge.context


internal actual fun getGlobalLoggerFactory(): PluginFactory<out LogManager> = DefaultLogManager
dataforge-data/README.md (new file, 23 lines)
@@ -0,0 +1,23 @@
# Module dataforge-data



## Usage

## Artifact:

The Maven coordinates of this project are `space.kscience:dataforge-data:0.7.0`.

**Gradle Kotlin DSL:**
```kotlin
repositories {
    maven("https://repo.kotlin.link")
    //uncomment to access development builds
    //maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
    mavenCentral()
}

dependencies {
    implementation("space.kscience:dataforge-data:0.7.0")
}
```
dataforge-data/api/dataforge-data.api (new file, 391 lines)
@ -0,0 +1,391 @@
|
||||
public abstract interface class hep/dataforge/data/Action {
|
||||
public abstract fun invoke (Lhep/dataforge/data/DataNode;Lhep/dataforge/meta/Meta;)Lhep/dataforge/data/DataNode;
|
||||
public abstract fun isTerminal ()Z
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/Action$DefaultImpls {
|
||||
public static fun isTerminal (Lhep/dataforge/data/Action;)Z
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/ActionEnv {
|
||||
public fun <init> (Lhep/dataforge/names/Name;Lhep/dataforge/meta/Meta;Lhep/dataforge/meta/Meta;)V
|
||||
public final fun component1 ()Lhep/dataforge/names/Name;
|
||||
public final fun component2 ()Lhep/dataforge/meta/Meta;
|
||||
public final fun component3 ()Lhep/dataforge/meta/Meta;
|
||||
public final fun copy (Lhep/dataforge/names/Name;Lhep/dataforge/meta/Meta;Lhep/dataforge/meta/Meta;)Lhep/dataforge/data/ActionEnv;
|
||||
public static synthetic fun copy$default (Lhep/dataforge/data/ActionEnv;Lhep/dataforge/names/Name;Lhep/dataforge/meta/Meta;Lhep/dataforge/meta/Meta;ILjava/lang/Object;)Lhep/dataforge/data/ActionEnv;
|
||||
public fun equals (Ljava/lang/Object;)Z
|
||||
public final fun getActionMeta ()Lhep/dataforge/meta/Meta;
|
||||
public final fun getMeta ()Lhep/dataforge/meta/Meta;
|
||||
public final fun getName ()Lhep/dataforge/names/Name;
|
||||
public fun hashCode ()I
|
||||
public fun toString ()Ljava/lang/String;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/ActionKt {
|
||||
public static final fun then (Lhep/dataforge/data/Action;Lhep/dataforge/data/Action;)Lhep/dataforge/data/Action;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/ComputationData : hep/dataforge/data/ComputationGoal, hep/dataforge/data/Data {
|
||||
public fun <init> (Lkotlin/reflect/KClass;Lhep/dataforge/meta/Meta;Lkotlin/coroutines/CoroutineContext;Ljava/util/Collection;Lkotlin/jvm/functions/Function2;)V
|
||||
public synthetic fun <init> (Lkotlin/reflect/KClass;Lhep/dataforge/meta/Meta;Lkotlin/coroutines/CoroutineContext;Ljava/util/Collection;Lkotlin/jvm/functions/Function2;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
|
||||
public fun getMeta ()Lhep/dataforge/meta/Meta;
|
||||
public fun getType ()Lkotlin/reflect/KClass;
|
||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||
}
|
||||
|
||||
public class hep/dataforge/data/ComputationGoal : hep/dataforge/data/Goal {
|
||||
public fun <init> (Lkotlin/coroutines/CoroutineContext;Ljava/util/Collection;Lkotlin/jvm/functions/Function2;)V
|
||||
public synthetic fun <init> (Lkotlin/coroutines/CoroutineContext;Ljava/util/Collection;Lkotlin/jvm/functions/Function2;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
|
||||
public final fun getBlock ()Lkotlin/jvm/functions/Function2;
|
||||
public fun getDependencies ()Ljava/util/Collection;
|
||||
public final fun getResult ()Lkotlinx/coroutines/Deferred;
|
||||
public fun reset ()V
|
||||
public fun startAsync (Lkotlinx/coroutines/CoroutineScope;)Lkotlinx/coroutines/Deferred;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/CoroutineMonitor : kotlin/coroutines/CoroutineContext$Element {
|
||||
public static final field Companion Lhep/dataforge/data/CoroutineMonitor$Companion;
|
||||
public fun <init> ()V
|
||||
public final fun finish ()V
|
||||
public fun fold (Ljava/lang/Object;Lkotlin/jvm/functions/Function2;)Ljava/lang/Object;
|
||||
public fun get (Lkotlin/coroutines/CoroutineContext$Key;)Lkotlin/coroutines/CoroutineContext$Element;
|
||||
public fun getKey ()Lkotlin/coroutines/CoroutineContext$Key;
|
||||
public final fun getStatus ()Ljava/lang/String;
|
||||
public final fun getTotalWork ()D
|
||||
public final fun getWorkDone ()D
|
||||
public fun minusKey (Lkotlin/coroutines/CoroutineContext$Key;)Lkotlin/coroutines/CoroutineContext;
|
||||
public fun plus (Lkotlin/coroutines/CoroutineContext;)Lkotlin/coroutines/CoroutineContext;
|
||||
public final fun setStatus (Ljava/lang/String;)V
|
||||
public final fun setTotalWork (D)V
|
||||
public final fun setWorkDone (D)V
|
||||
public final fun start ()V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/CoroutineMonitor$Companion : kotlin/coroutines/CoroutineContext$Key {
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/CoroutineMonitorKt {
|
||||
public static final fun getDependencies (Lkotlinx/coroutines/Job;)Ljava/util/Collection;
|
||||
public static final fun getMonitor (Lkotlin/coroutines/CoroutineContext;)Lhep/dataforge/data/CoroutineMonitor;
|
||||
public static final fun getMonitor (Lkotlinx/coroutines/CoroutineScope;)Lhep/dataforge/data/CoroutineMonitor;
|
||||
public static final fun getProgress (Lkotlinx/coroutines/Job;)D
|
||||
public static final fun getStatus (Lkotlinx/coroutines/Job;)Ljava/lang/String;
|
||||
public static final fun getTotalWork (Lkotlinx/coroutines/Job;)D
|
||||
public static final fun getWorkDone (Lkotlinx/coroutines/Job;)D
|
||||
}
|
||||
|
||||
public abstract interface class hep/dataforge/data/Data : hep/dataforge/data/Goal, hep/dataforge/meta/MetaRepr {
|
||||
public static final field Companion Lhep/dataforge/data/Data$Companion;
|
||||
public static final field TYPE Ljava/lang/String;
|
||||
public abstract fun getMeta ()Lhep/dataforge/meta/Meta;
|
||||
public abstract fun getType ()Lkotlin/reflect/KClass;
|
||||
public abstract fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/Data$Companion {
|
||||
public static final field TYPE Ljava/lang/String;
|
||||
public final fun invoke (Ljava/lang/String;Lkotlin/reflect/KClass;Lhep/dataforge/meta/Meta;Lkotlin/coroutines/CoroutineContext;Ljava/util/Collection;Lkotlin/jvm/functions/Function2;)Lhep/dataforge/data/Data;
|
||||
public final fun invoke (Lkotlin/reflect/KClass;Lhep/dataforge/meta/Meta;Lkotlin/coroutines/CoroutineContext;Ljava/util/Collection;Lkotlin/jvm/functions/Function2;)Lhep/dataforge/data/Data;
|
||||
public static synthetic fun invoke$default (Lhep/dataforge/data/Data$Companion;Ljava/lang/String;Lkotlin/reflect/KClass;Lhep/dataforge/meta/Meta;Lkotlin/coroutines/CoroutineContext;Ljava/util/Collection;Lkotlin/jvm/functions/Function2;ILjava/lang/Object;)Lhep/dataforge/data/Data;
|
||||
public static synthetic fun invoke$default (Lhep/dataforge/data/Data$Companion;Lkotlin/reflect/KClass;Lhep/dataforge/meta/Meta;Lkotlin/coroutines/CoroutineContext;Ljava/util/Collection;Lkotlin/jvm/functions/Function2;ILjava/lang/Object;)Lhep/dataforge/data/Data;
|
||||
public final fun static (Ljava/lang/Object;Lhep/dataforge/meta/Meta;)Lhep/dataforge/data/Data;
|
||||
public static synthetic fun static$default (Lhep/dataforge/data/Data$Companion;Ljava/lang/Object;Lhep/dataforge/meta/Meta;ILjava/lang/Object;)Lhep/dataforge/data/Data;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/Data$DefaultImpls {
|
||||
public static fun toMeta (Lhep/dataforge/data/Data;)Lhep/dataforge/meta/Meta;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/DataFilter : hep/dataforge/meta/Scheme {
|
||||
public static final field Companion Lhep/dataforge/data/DataFilter$Companion;
|
||||
public fun <init> ()V
|
||||
public final fun getFrom ()Ljava/lang/String;
|
||||
public final fun getPattern ()Ljava/lang/String;
|
||||
public final fun getTo ()Ljava/lang/String;
|
||||
public final fun setFrom (Ljava/lang/String;)V
|
||||
public final fun setPattern (Ljava/lang/String;)V
|
||||
public final fun setTo (Ljava/lang/String;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/DataFilter$Companion : hep/dataforge/meta/SchemeSpec {
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/DataFilterKt {
|
||||
public static final fun filter (Lhep/dataforge/data/DataNode;Lhep/dataforge/data/DataFilter;)Lhep/dataforge/data/DataNode;
|
||||
public static final fun filter (Lhep/dataforge/data/DataNode;Lhep/dataforge/meta/Meta;)Lhep/dataforge/data/DataNode;
|
||||
public static final fun filter (Lhep/dataforge/data/DataNode;Lkotlin/jvm/functions/Function1;)Lhep/dataforge/data/DataNode;
|
||||
}
|
||||
|
||||
public abstract class hep/dataforge/data/DataItem : hep/dataforge/meta/MetaRepr {
|
||||
public abstract fun getMeta ()Lhep/dataforge/meta/Meta;
|
||||
public abstract fun getType ()Lkotlin/reflect/KClass;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/DataItem$Leaf : hep/dataforge/data/DataItem {
|
||||
public fun <init> (Lhep/dataforge/data/Data;)V
|
||||
public final fun getData ()Lhep/dataforge/data/Data;
|
||||
public fun getMeta ()Lhep/dataforge/meta/Meta;
|
||||
public fun getType ()Lkotlin/reflect/KClass;
|
||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/DataItem$Node : hep/dataforge/data/DataItem {
|
||||
public fun <init> (Lhep/dataforge/data/DataNode;)V
|
||||
public fun getMeta ()Lhep/dataforge/meta/Meta;
|
||||
public final fun getNode ()Lhep/dataforge/data/DataNode;
|
||||
public fun getType ()Lkotlin/reflect/KClass;
|
||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/DataJVMKt {
|
||||
public static final fun canCast (Lhep/dataforge/data/DataItem;Lkotlin/reflect/KClass;)Z
|
||||
public static final fun cast (Lhep/dataforge/data/Data;Lkotlin/reflect/KClass;)Lhep/dataforge/data/Data;
|
||||
public static final fun cast (Lhep/dataforge/data/DataNode;Lkotlin/reflect/KClass;)Lhep/dataforge/data/DataNode;
|
||||
public static final fun ensureType (Lhep/dataforge/data/DataNode;Lkotlin/reflect/KClass;)V
|
||||
public static final fun filterIsInstance (Lhep/dataforge/data/Data;Lkotlin/reflect/KClass;)Lhep/dataforge/data/Data;
|
||||
public static final fun filterIsInstance (Lhep/dataforge/data/DataItem;Lkotlin/reflect/KClass;)Lhep/dataforge/data/DataItem;
|
||||
public static final fun filterIsInstance (Lhep/dataforge/data/DataNode;Lkotlin/reflect/KClass;)Lhep/dataforge/data/DataNode;
|
||||
public static final fun get (Lhep/dataforge/data/Data;)Ljava/lang/Object;
|
||||
public static final fun upcast (Lhep/dataforge/data/Data;Lkotlin/reflect/KClass;)Lhep/dataforge/data/Data;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/DataKt {
|
||||
public static final fun map (Lhep/dataforge/data/Data;Lkotlin/reflect/KClass;Lkotlin/coroutines/CoroutineContext;Lhep/dataforge/meta/Meta;Lkotlin/jvm/functions/Function3;)Lhep/dataforge/data/Data;
|
||||
public static synthetic fun map$default (Lhep/dataforge/data/Data;Lkotlin/reflect/KClass;Lkotlin/coroutines/CoroutineContext;Lhep/dataforge/meta/Meta;Lkotlin/jvm/functions/Function3;ILjava/lang/Object;)Lhep/dataforge/data/Data;
|
||||
public static final fun reduce (Ljava/util/Map;Lkotlin/reflect/KClass;Lkotlin/coroutines/CoroutineContext;Lhep/dataforge/meta/Meta;Lkotlin/jvm/functions/Function3;)Lhep/dataforge/data/ComputationData;
|
||||
public static synthetic fun reduce$default (Ljava/util/Map;Lkotlin/reflect/KClass;Lkotlin/coroutines/CoroutineContext;Lhep/dataforge/meta/Meta;Lkotlin/jvm/functions/Function3;ILjava/lang/Object;)Lhep/dataforge/data/ComputationData;
|
||||
}
|
||||
|
||||
public abstract interface class hep/dataforge/data/DataNode : hep/dataforge/meta/MetaRepr {
|
||||
public static final field Companion Lhep/dataforge/data/DataNode$Companion;
|
||||
public static final field TYPE Ljava/lang/String;
|
||||
public abstract fun getItems ()Ljava/util/Map;
|
||||
public abstract fun getMeta ()Lhep/dataforge/meta/Meta;
|
||||
public abstract fun getType ()Lkotlin/reflect/KClass;
|
||||
public abstract fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/DataNode$Companion {
|
||||
public static final field TYPE Ljava/lang/String;
|
||||
public final fun builder (Lkotlin/reflect/KClass;)Lhep/dataforge/data/DataTreeBuilder;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/DataNode$DefaultImpls {
|
||||
public static fun toMeta (Lhep/dataforge/data/DataNode;)Lhep/dataforge/meta/Meta;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/DataNodeKt {
|
||||
public static final fun dataSequence (Lhep/dataforge/data/DataNode;)Lkotlin/sequences/Sequence;
|
||||
public static final fun filter (Lhep/dataforge/data/DataNode;Lkotlin/jvm/functions/Function2;)Lhep/dataforge/data/DataNode;
|
||||
public static final fun first (Lhep/dataforge/data/DataNode;)Lhep/dataforge/data/Data;
|
||||
public static final fun get (Lhep/dataforge/data/DataNode;Lhep/dataforge/names/Name;)Lhep/dataforge/data/DataItem;
|
||||
public static final fun get (Lhep/dataforge/data/DataNode;Ljava/lang/String;)Lhep/dataforge/data/DataItem;
|
||||
public static final fun getData (Lhep/dataforge/data/DataItem;)Lhep/dataforge/data/Data;
|
||||
public static final fun getNode (Lhep/dataforge/data/DataItem;)Lhep/dataforge/data/DataNode;
|
||||
public static final fun itemSequence (Lhep/dataforge/data/DataNode;)Lkotlin/sequences/Sequence;
|
||||
public static final fun iterator (Lhep/dataforge/data/DataNode;)Ljava/util/Iterator;
|
||||
public static final fun join (Lhep/dataforge/data/DataNode;Lkotlin/coroutines/Continuation;)Ljava/lang/Object;
|
||||
public static final fun startAll (Lhep/dataforge/data/DataNode;Lkotlinx/coroutines/CoroutineScope;)Lkotlinx/coroutines/Job;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/DataTree : hep/dataforge/data/DataNode {
|
||||
public fun getItems ()Ljava/util/Map;
|
||||
public fun getMeta ()Lhep/dataforge/meta/Meta;
|
||||
public fun getType ()Lkotlin/reflect/KClass;
|
||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/DataTreeBuilder {
|
||||
public fun <init> (Lkotlin/reflect/KClass;)V
|
||||
public final fun build ()Lhep/dataforge/data/DataTree;
|
||||
public final fun getType ()Lkotlin/reflect/KClass;
|
||||
public final fun meta (Lhep/dataforge/meta/Meta;)V
|
||||
public final fun meta (Lkotlin/jvm/functions/Function1;)Lhep/dataforge/meta/MetaBuilder;
|
||||
public final fun put (Ljava/lang/String;Lhep/dataforge/data/Data;)V
|
||||
public final fun put (Ljava/lang/String;Lhep/dataforge/data/DataItem;)V
|
||||
public final fun put (Ljava/lang/String;Lhep/dataforge/data/DataNode;)V
|
||||
public final fun put (Ljava/lang/String;Lkotlin/jvm/functions/Function1;)V
|
||||
public final fun set (Lhep/dataforge/names/Name;Lhep/dataforge/data/Data;)V
|
||||
public final fun set (Lhep/dataforge/names/Name;Lhep/dataforge/data/DataItem;)V
|
||||
public final fun set (Lhep/dataforge/names/Name;Lhep/dataforge/data/DataNode;)V
|
||||
public final fun set (Lhep/dataforge/names/Name;Lhep/dataforge/data/DataTreeBuilder;)V
|
||||
public final fun set (Lhep/dataforge/names/NameToken;Lhep/dataforge/data/Data;)V
|
||||
public final fun set (Lhep/dataforge/names/NameToken;Lhep/dataforge/data/DataTreeBuilder;)V
|
||||
public final fun update (Lhep/dataforge/data/DataNode;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/DataTreeBuilderKt {
|
||||
public static final fun DataTree (Lkotlin/reflect/KClass;Lkotlin/jvm/functions/Function1;)Lhep/dataforge/data/DataTree;
|
||||
public static final fun builder (Lhep/dataforge/data/DataNode;)Lhep/dataforge/data/DataTreeBuilder;
|
||||
public static final fun datum (Lhep/dataforge/data/DataTreeBuilder;Lhep/dataforge/names/Name;Lhep/dataforge/data/Data;)V
|
||||
public static final fun datum (Lhep/dataforge/data/DataTreeBuilder;Ljava/lang/String;Lhep/dataforge/data/Data;)V
|
||||
public static final fun node (Lhep/dataforge/data/DataTreeBuilder;Lhep/dataforge/names/Name;Lhep/dataforge/data/DataNode;)V
|
||||
public static final fun node (Lhep/dataforge/data/DataTreeBuilder;Ljava/lang/String;Lhep/dataforge/data/DataNode;)V
|
||||
public static final fun static (Lhep/dataforge/data/DataTreeBuilder;Lhep/dataforge/names/Name;Ljava/lang/Object;Lhep/dataforge/meta/Meta;)V
|
||||
public static final fun static (Lhep/dataforge/data/DataTreeBuilder;Lhep/dataforge/names/Name;Ljava/lang/Object;Lkotlin/jvm/functions/Function1;)V
|
||||
public static final fun static (Lhep/dataforge/data/DataTreeBuilder;Ljava/lang/String;Ljava/lang/Object;Lkotlin/jvm/functions/Function1;)V
|
||||
public static synthetic fun static$default (Lhep/dataforge/data/DataTreeBuilder;Lhep/dataforge/names/Name;Ljava/lang/Object;Lhep/dataforge/meta/Meta;ILjava/lang/Object;)V
|
||||
public static synthetic fun static$default (Lhep/dataforge/data/DataTreeBuilder;Lhep/dataforge/names/Name;Ljava/lang/Object;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
|
||||
public static synthetic fun static$default (Lhep/dataforge/data/DataTreeBuilder;Ljava/lang/String;Ljava/lang/Object;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/Dependencies : kotlin/coroutines/CoroutineContext$Element {
|
||||
public static final field Companion Lhep/dataforge/data/Dependencies$Companion;
|
||||
public fun <init> (Ljava/util/Collection;)V
|
||||
public fun fold (Ljava/lang/Object;Lkotlin/jvm/functions/Function2;)Ljava/lang/Object;
|
||||
public fun get (Lkotlin/coroutines/CoroutineContext$Key;)Lkotlin/coroutines/CoroutineContext$Element;
|
||||
public fun getKey ()Lkotlin/coroutines/CoroutineContext$Key;
|
||||
public final fun getValues ()Ljava/util/Collection;
|
||||
public fun minusKey (Lkotlin/coroutines/CoroutineContext$Key;)Lkotlin/coroutines/CoroutineContext;
|
||||
public fun plus (Lkotlin/coroutines/CoroutineContext;)Lkotlin/coroutines/CoroutineContext;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/Dependencies$Companion : kotlin/coroutines/CoroutineContext$Key {
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/FragmentRule {
|
||||
public field result Lkotlin/jvm/functions/Function2;
|
||||
public fun <init> (Lhep/dataforge/names/Name;Lhep/dataforge/meta/MetaBuilder;)V
|
||||
public final fun getMeta ()Lhep/dataforge/meta/MetaBuilder;
|
||||
public final fun getName ()Lhep/dataforge/names/Name;
|
||||
public final fun getResult ()Lkotlin/jvm/functions/Function2;
|
||||
public final fun result (Lkotlin/jvm/functions/Function2;)V
|
||||
public final fun setMeta (Lhep/dataforge/meta/MetaBuilder;)V
|
||||
public final fun setResult (Lkotlin/jvm/functions/Function2;)V
|
||||
}
|
||||
|
||||
public abstract interface class hep/dataforge/data/Goal {
|
||||
public static final field Companion Lhep/dataforge/data/Goal$Companion;
|
||||
public abstract fun getDependencies ()Ljava/util/Collection;
|
||||
public abstract fun getResult ()Lkotlinx/coroutines/Deferred;
|
||||
public abstract fun reset ()V
|
||||
public abstract fun startAsync (Lkotlinx/coroutines/CoroutineScope;)Lkotlinx/coroutines/Deferred;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/Goal$Companion {
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/GoalKt {
|
||||
public static final fun await (Lhep/dataforge/data/Goal;Lkotlin/coroutines/Continuation;)Ljava/lang/Object;
|
||||
public static final fun isComplete (Lhep/dataforge/data/Goal;)Z
|
||||
public static final fun map (Lhep/dataforge/data/Goal;Lkotlin/coroutines/CoroutineContext;Lkotlin/jvm/functions/Function3;)Lhep/dataforge/data/Goal;
|
||||
public static synthetic fun map$default (Lhep/dataforge/data/Goal;Lkotlin/coroutines/CoroutineContext;Lkotlin/jvm/functions/Function3;ILjava/lang/Object;)Lhep/dataforge/data/Goal;
|
||||
public static final fun reduce (Ljava/util/Collection;Lkotlin/coroutines/CoroutineContext;Lkotlin/jvm/functions/Function3;)Lhep/dataforge/data/Goal;
|
||||
public static final fun reduce (Ljava/util/Map;Lkotlin/coroutines/CoroutineContext;Lkotlin/jvm/functions/Function3;)Lhep/dataforge/data/Goal;
|
||||
public static synthetic fun reduce$default (Ljava/util/Collection;Lkotlin/coroutines/CoroutineContext;Lkotlin/jvm/functions/Function3;ILjava/lang/Object;)Lhep/dataforge/data/Goal;
|
||||
public static synthetic fun reduce$default (Ljava/util/Map;Lkotlin/coroutines/CoroutineContext;Lkotlin/jvm/functions/Function3;ILjava/lang/Object;)Lhep/dataforge/data/Goal;
|
||||
}
|
||||
|
||||
public abstract interface class hep/dataforge/data/GroupRule {
|
||||
public static final field Companion Lhep/dataforge/data/GroupRule$Companion;
|
||||
public abstract fun invoke (Lhep/dataforge/data/DataNode;)Ljava/util/Map;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/GroupRule$Companion {
|
||||
public final fun byMeta (Lhep/dataforge/meta/Meta;)Lhep/dataforge/data/GroupRule;
|
||||
public final fun byValue (Ljava/lang/String;Ljava/lang/String;)Lhep/dataforge/data/GroupRule;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/JoinGroup {
|
||||
public field result Lkotlin/jvm/functions/Function3;
|
||||
public fun <init> (Ljava/lang/String;Lhep/dataforge/data/DataNode;)V
|
||||
public final fun getMeta ()Lhep/dataforge/meta/MetaBuilder;
|
||||
public final fun getName ()Ljava/lang/String;
|
||||
public final fun getResult ()Lkotlin/jvm/functions/Function3;
|
||||
public final fun result (Lkotlin/jvm/functions/Function3;)V
|
||||
public final fun setMeta (Lhep/dataforge/meta/MetaBuilder;)V
|
||||
public final fun setName (Ljava/lang/String;)V
|
||||
public final fun setResult (Lkotlin/jvm/functions/Function3;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/MapAction : hep/dataforge/data/Action {
|
||||
public fun <init> (Lkotlin/reflect/KClass;Lkotlin/jvm/functions/Function1;)V
|
||||
public fun invoke (Lhep/dataforge/data/DataNode;Lhep/dataforge/meta/Meta;)Lhep/dataforge/data/DataNode;
|
||||
public fun isTerminal ()Z
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/MapActionBuilder {
|
||||
public field result Lkotlin/jvm/functions/Function3;
|
||||
public fun <init> (Lhep/dataforge/names/Name;Lhep/dataforge/meta/MetaBuilder;Lhep/dataforge/meta/Meta;)V
|
||||
public final fun getActionMeta ()Lhep/dataforge/meta/Meta;
|
||||
public final fun getMeta ()Lhep/dataforge/meta/MetaBuilder;
|
||||
public final fun getName ()Lhep/dataforge/names/Name;
|
||||
public final fun getResult ()Lkotlin/jvm/functions/Function3;
|
||||
public final fun result (Lkotlin/jvm/functions/Function3;)V
|
||||
public final fun setMeta (Lhep/dataforge/meta/MetaBuilder;)V
|
||||
public final fun setName (Lhep/dataforge/names/Name;)V
|
||||
public final fun setResult (Lkotlin/jvm/functions/Function3;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/NamedData : hep/dataforge/data/Data {
|
||||
public fun <init> (Ljava/lang/String;Lhep/dataforge/data/Data;)V
|
||||
public fun getDependencies ()Ljava/util/Collection;
|
||||
public fun getMeta ()Lhep/dataforge/meta/Meta;
|
||||
public final fun getName ()Ljava/lang/String;
|
||||
public fun getResult ()Lkotlinx/coroutines/Deferred;
|
||||
public fun getType ()Lkotlin/reflect/KClass;
|
||||
public fun reset ()V
|
||||
public fun startAsync (Lkotlinx/coroutines/CoroutineScope;)Lkotlinx/coroutines/Deferred;
|
||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/ReduceAction : hep/dataforge/data/Action {
|
||||
public fun <init> (Lkotlin/reflect/KClass;Lkotlin/jvm/functions/Function1;)V
|
||||
public fun invoke (Lhep/dataforge/data/DataNode;Lhep/dataforge/meta/Meta;)Lhep/dataforge/data/DataNode;
|
||||
public fun isTerminal ()Z
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/ReduceActionKt {
|
||||
public static final fun get (Ljava/util/Map;Ljava/lang/String;)Ljava/lang/Object;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/ReduceGroupBuilder {
|
||||
public fun <init> (Lhep/dataforge/meta/Meta;)V
|
||||
public final fun byValue (Ljava/lang/String;Ljava/lang/String;Lkotlin/jvm/functions/Function1;)V
|
||||
public static synthetic fun byValue$default (Lhep/dataforge/data/ReduceGroupBuilder;Ljava/lang/String;Ljava/lang/String;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
|
||||
public final fun getActionMeta ()Lhep/dataforge/meta/Meta;
|
||||
public final fun group (Ljava/lang/String;Lhep/dataforge/data/DataFilter;Lkotlin/jvm/functions/Function1;)V
|
||||
public final fun group (Ljava/lang/String;Lkotlin/jvm/functions/Function2;Lkotlin/jvm/functions/Function1;)V
|
||||
public final fun result (Ljava/lang/String;Lkotlin/jvm/functions/Function3;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/SplitAction : hep/dataforge/data/Action {
|
||||
public fun <init> (Lkotlin/reflect/KClass;Lkotlin/jvm/functions/Function1;)V
|
||||
public fun invoke (Lhep/dataforge/data/DataNode;Lhep/dataforge/meta/Meta;)Lhep/dataforge/data/DataNode;
|
||||
public fun isTerminal ()Z
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/SplitBuilder {
|
||||
public fun <init> (Lhep/dataforge/names/Name;Lhep/dataforge/meta/Meta;)V
|
||||
public final fun fragment (Ljava/lang/String;Lkotlin/jvm/functions/Function1;)V
|
||||
public final fun getMeta ()Lhep/dataforge/meta/Meta;
|
||||
public final fun getName ()Lhep/dataforge/names/Name;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/StaticData : hep/dataforge/data/StaticGoal, hep/dataforge/data/Data {
|
||||
public fun <init> (Ljava/lang/Object;Lhep/dataforge/meta/Meta;)V
|
||||
public synthetic fun <init> (Ljava/lang/Object;Lhep/dataforge/meta/Meta;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
|
||||
public fun getMeta ()Lhep/dataforge/meta/Meta;
|
||||
public fun getType ()Lkotlin/reflect/KClass;
|
||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||
}
|
||||
|
||||
public class hep/dataforge/data/StaticGoal : hep/dataforge/data/Goal {
|
||||
public fun <init> (Ljava/lang/Object;)V
|
||||
public fun getDependencies ()Ljava/util/Collection;
|
||||
public fun getResult ()Lkotlinx/coroutines/Deferred;
|
||||
public final fun getValue ()Ljava/lang/Object;
|
||||
public fun reset ()V
|
||||
public fun startAsync (Lkotlinx/coroutines/CoroutineScope;)Lkotlinx/coroutines/Deferred;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/data/TypeFilteredDataNode : hep/dataforge/data/DataNode {
|
||||
public fun <init> (Lhep/dataforge/data/DataNode;Lkotlin/reflect/KClass;)V
|
||||
public fun getItems ()Ljava/util/Map;
|
||||
public fun getMeta ()Lhep/dataforge/meta/Meta;
|
||||
public final fun getOrigin ()Lhep/dataforge/data/DataNode;
|
||||
public fun getType ()Lkotlin/reflect/KClass;
|
||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||
}
|
||||
|
@ -1,29 +1,18 @@
|
||||
plugins {
|
||||
id("scientifik.mpp")
|
||||
id("space.kscience.gradle.mpp")
|
||||
}
|
||||
|
||||
val coroutinesVersion: String = Scientifik.coroutinesVersion
|
||||
|
||||
kotlin {
|
||||
sourceSets {
|
||||
val commonMain by getting{
|
||||
dependencies {
|
||||
api(project(":dataforge-meta"))
|
||||
api("org.jetbrains.kotlinx:kotlinx-coroutines-core-common:$coroutinesVersion")
|
||||
}
|
||||
}
|
||||
|
||||
val jvmMain by getting{
|
||||
dependencies {
|
||||
api("org.jetbrains.kotlinx:kotlinx-coroutines-core:$coroutinesVersion")
|
||||
api(kotlin("reflect"))
|
||||
}
|
||||
}
|
||||
|
||||
val jsMain by getting{
|
||||
dependencies {
|
||||
api("org.jetbrains.kotlinx:kotlinx-coroutines-core-js:$coroutinesVersion")
|
||||
}
|
||||
}
|
||||
kscience{
|
||||
jvm()
|
||||
js()
|
||||
native()
|
||||
useCoroutines()
|
||||
dependencies {
|
||||
api(project(":dataforge-meta"))
|
||||
api(kotlin("reflect"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
readme{
|
||||
maturity = space.kscience.gradle.Maturity.EXPERIMENTAL
|
||||
}
|
||||
|
@ -1,35 +0,0 @@
|
||||
package hep.dataforge.data
|
||||
|
||||
import hep.dataforge.meta.Meta
|
||||
|
||||
/**
|
||||
* A simple data transformation on a data node
|
||||
*/
|
||||
interface Action<in T : Any, out R : Any> {
|
||||
/**
|
||||
* Transform the data in the node, producing a new node. By default it is assumed that all calculations are lazy
|
||||
* so no actual computation is started at this moment
|
||||
*/
|
||||
operator fun invoke(node: DataNode<T>, meta: Meta): DataNode<R>
|
||||
|
||||
/**
|
||||
* A terminal action is one that cannot be invoked lazily and requires some kind of blocking computation to invoke
|
||||
*/
|
||||
val isTerminal: Boolean get() = false
|
||||
}
|
||||
|
||||
/**
|
||||
* Action composition. The result is terminal if one of its parts is terminal
|
||||
*/
|
||||
infix fun <T : Any, I : Any, R : Any> Action<T, I>.then(action: Action<I, R>): Action<T, R> {
|
||||
// TODO introduce composite action and add optimize by adding action to the list
|
||||
return object : Action<T, R> {
|
||||
override fun invoke(node: DataNode<T>, meta: Meta): DataNode<R> {
|
||||
return action(this@then.invoke(node, meta), meta)
|
||||
}
|
||||
|
||||
override val isTerminal: Boolean
|
||||
get() = this@then.isTerminal || action.isTerminal
|
||||
}
|
||||
}
|
||||
|
@ -1,48 +0,0 @@
|
||||
package hep.dataforge.data
|
||||
|
||||
import kotlinx.coroutines.CoroutineScope
|
||||
import kotlinx.coroutines.Job
|
||||
import kotlin.coroutines.CoroutineContext
|
||||
|
||||
/**
|
||||
* A monitor of the goal state that could be accessed only from inside the goal
|
||||
*/
|
||||
class CoroutineMonitor : CoroutineContext.Element {
|
||||
override val key: CoroutineContext.Key<*> get() = CoroutineMonitor
|
||||
|
||||
var totalWork: Double = 1.0
|
||||
var workDone: Double = 0.0
|
||||
var status: String = ""
|
||||
|
||||
/**
|
||||
* Mark the goal as started
|
||||
*/
|
||||
fun start() {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark the goal as completed
|
||||
*/
|
||||
fun finish() {
|
||||
workDone = totalWork
|
||||
}
|
||||
|
||||
companion object : CoroutineContext.Key<CoroutineMonitor>
|
||||
}
|
||||
|
||||
class Dependencies(val values: Collection<Job>) : CoroutineContext.Element {
|
||||
override val key: CoroutineContext.Key<*> get() = Dependencies
|
||||
|
||||
companion object : CoroutineContext.Key<Dependencies>
|
||||
}
|
||||
|
||||
val CoroutineContext.monitor: CoroutineMonitor? get() = this[CoroutineMonitor]
|
||||
val CoroutineScope.monitor: CoroutineMonitor? get() = coroutineContext.monitor
|
||||
|
||||
val Job.dependencies: Collection<Job> get() = this[Dependencies]?.values ?: emptyList()
|
||||
|
||||
val Job.totalWork: Double get() = dependencies.sumByDouble { totalWork } + (monitor?.totalWork ?: 0.0)
|
||||
val Job.workDone: Double get() = dependencies.sumByDouble { workDone } + (monitor?.workDone ?: 0.0)
|
||||
val Job.status: String get() = monitor?.status ?: ""
|
||||
val Job.progress: Double get() = workDone / totalWork
|
@ -1,159 +0,0 @@
|
||||
package hep.dataforge.data
|
||||
|
||||
import hep.dataforge.meta.*
|
||||
import kotlinx.coroutines.CoroutineScope
|
||||
import kotlin.coroutines.CoroutineContext
|
||||
import kotlin.coroutines.EmptyCoroutineContext
|
||||
import kotlin.reflect.KClass
|
||||
|
||||
/**
|
||||
* A data element characterized by its meta
|
||||
*/
|
||||
interface Data<out T : Any> : Goal<T>, MetaRepr {
|
||||
/**
|
||||
* Type marker for the data. The type is known before the calculation takes place so it could be checked.
|
||||
*/
|
||||
val type: KClass<out T>
|
||||
/**
|
||||
* Meta for the data
|
||||
*/
|
||||
val meta: Meta
|
||||
|
||||
override fun toMeta(): Meta = buildMeta {
|
||||
"type" put (type.simpleName?:"undefined")
|
||||
if(!meta.isEmpty()) {
|
||||
"meta" put meta
|
||||
}
|
||||
}
|
||||
|
||||
companion object {
|
||||
const val TYPE = "data"
|
||||
|
||||
operator fun <T : Any> invoke(
|
||||
type: KClass<out T>,
|
||||
meta: Meta = EmptyMeta,
|
||||
context: CoroutineContext = EmptyCoroutineContext,
|
||||
dependencies: Collection<Data<*>> = emptyList(),
|
||||
block: suspend CoroutineScope.() -> T
|
||||
): Data<T> = DynamicData(type, meta, context, dependencies, block)
|
||||
|
||||
inline operator fun <reified T : Any> invoke(
|
||||
meta: Meta = EmptyMeta,
|
||||
context: CoroutineContext = EmptyCoroutineContext,
|
||||
dependencies: Collection<Data<*>> = emptyList(),
|
||||
noinline block: suspend CoroutineScope.() -> T
|
||||
): Data<T> = invoke(T::class, meta, context, dependencies, block)
|
||||
|
||||
operator fun <T : Any> invoke(
|
||||
name: String,
|
||||
type: KClass<out T>,
|
||||
meta: Meta = EmptyMeta,
|
||||
context: CoroutineContext = EmptyCoroutineContext,
|
||||
dependencies: Collection<Data<*>> = emptyList(),
|
||||
block: suspend CoroutineScope.() -> T
|
||||
): Data<T> = NamedData(name, invoke(type, meta, context, dependencies, block))
|
||||
|
||||
inline operator fun <reified T : Any> invoke(
|
||||
name: String,
|
||||
meta: Meta = EmptyMeta,
|
||||
context: CoroutineContext = EmptyCoroutineContext,
|
||||
dependencies: Collection<Data<*>> = emptyList(),
|
||||
noinline block: suspend CoroutineScope.() -> T
|
||||
): Data<T> =
|
||||
invoke(name, T::class, meta, context, dependencies, block)
|
||||
|
||||
fun <T : Any> static(value: T, meta: Meta = EmptyMeta): Data<T> =
|
||||
StaticData(value, meta)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class DynamicData<T : Any>(
|
||||
override val type: KClass<out T>,
|
||||
override val meta: Meta = EmptyMeta,
|
||||
context: CoroutineContext = EmptyCoroutineContext,
|
||||
dependencies: Collection<Data<*>> = emptyList(),
|
||||
block: suspend CoroutineScope.() -> T
|
||||
) : Data<T>, DynamicGoal<T>(context, dependencies, block)
|
||||
|
||||
class StaticData<T : Any>(
|
||||
value: T,
|
||||
override val meta: Meta = EmptyMeta
|
||||
) : Data<T>, StaticGoal<T>(value) {
|
||||
override val type: KClass<out T> get() = value::class
|
||||
}
|
||||
|
||||
class NamedData<out T : Any>(val name: String, data: Data<T>) : Data<T> by data
|
||||
|
||||
fun <T : Any, R : Any> Data<T>.map(
|
||||
outputType: KClass<out R>,
|
||||
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||
meta: Meta = this.meta,
|
||||
block: suspend CoroutineScope.(T) -> R
|
||||
): Data<R> = DynamicData(outputType, meta, coroutineContext, listOf(this)) {
|
||||
block(await(this))
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Create a data pipe
|
||||
*/
|
||||
inline fun <T : Any, reified R : Any> Data<T>.map(
|
||||
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||
meta: Meta = this.meta,
|
||||
noinline block: suspend CoroutineScope.(T) -> R
|
||||
): Data<R> = DynamicData(R::class, meta, coroutineContext, listOf(this)) {
|
||||
block(await(this))
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a joined data.
|
||||
*/
|
||||
inline fun <T : Any, reified R : Any> Collection<Data<T>>.reduce(
|
||||
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||
meta: Meta,
|
||||
noinline block: suspend CoroutineScope.(Collection<T>) -> R
|
||||
): Data<R> = DynamicData(
|
||||
R::class,
|
||||
meta,
|
||||
coroutineContext,
|
||||
this
|
||||
) {
|
||||
block(map { run { it.await(this) } })
|
||||
}
|
||||
|
||||
fun <K, T : Any, R : Any> Map<K, Data<T>>.reduce(
|
||||
outputType: KClass<out R>,
|
||||
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||
meta: Meta,
|
||||
block: suspend CoroutineScope.(Map<K, T>) -> R
|
||||
): DynamicData<R> = DynamicData(
|
||||
outputType,
|
||||
meta,
|
||||
coroutineContext,
|
||||
this.values
|
||||
) {
|
||||
block(mapValues { it.value.await(this) })
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* A joining of multiple data into a single one
|
||||
* @param K type of the map key
|
||||
* @param T type of the input goal
|
||||
* @param R type of the result goal
|
||||
*/
|
||||
inline fun <K, T : Any, reified R : Any> Map<K, Data<T>>.reduce(
|
||||
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||
meta: Meta,
|
||||
noinline block: suspend CoroutineScope.(Map<K, T>) -> R
|
||||
): DynamicData<R> = DynamicData(
|
||||
R::class,
|
||||
meta,
|
||||
coroutineContext,
|
||||
this.values
|
||||
) {
|
||||
block(mapValues { it.value.await(this) })
|
||||
}
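// A minimal usage sketch for the map/reduce helpers above; the function name and
// values are illustrative only and not part of the library API.
fun sumOfDoubled(): Data<Int> {
    val inputs: List<Data<Int>> = (1..3).map { Data.static(it) }
    // each transformation is lazy: nothing is computed until the goal is started
    val doubled: List<Data<Int>> = inputs.map { data -> data.map { value -> value * 2 } }
    return doubled.reduce(meta = EmptyMeta) { values -> values.sum() }
}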
|
||||
|
||||
|
@ -1,57 +0,0 @@
|
||||
package hep.dataforge.data
|
||||
|
||||
import hep.dataforge.meta.*
|
||||
import hep.dataforge.names.toName
|
||||
|
||||
|
||||
class DataFilter(override val config: Config) : Specific {
|
||||
/**
|
||||
* A source node for the filter
|
||||
*/
|
||||
var from by string()
|
||||
/**
|
||||
* A target placement for the filtered node
|
||||
*/
|
||||
var to by string()
|
||||
/**
|
||||
* A regular expression pattern for the filter
|
||||
*/
|
||||
var pattern by string(".*")
|
||||
// val prefix by string()
|
||||
// val suffix by string()
|
||||
|
||||
fun isEmpty(): Boolean = config.isEmpty()
|
||||
|
||||
companion object : Specification<DataFilter> {
|
||||
override fun wrap(config: Config): DataFilter = DataFilter(config)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply meta-based filter to given data node
|
||||
*/
|
||||
fun <T : Any> DataNode<T>.filter(filter: DataFilter): DataNode<T> {
|
||||
val sourceNode = filter.from?.let { get(it.toName()).node } ?: this@filter
|
||||
val regex = filter.pattern.toRegex()
|
||||
val targetNode = DataTreeBuilder(type).apply {
|
||||
sourceNode.dataSequence().forEach { (name, data) ->
|
||||
if (name.toString().matches(regex)) {
|
||||
this[name] = data
|
||||
}
|
||||
}
|
||||
}
|
||||
return filter.to?.let {
|
||||
DataTreeBuilder(type).apply { this[it.toName()] = targetNode }.build()
|
||||
} ?: targetNode.build()
|
||||
}
|
||||
|
||||
/**
|
||||
* Filter data using [DataFilter] specification
|
||||
*/
|
||||
fun <T : Any> DataNode<T>.filter(filter: Meta): DataNode<T> = filter(DataFilter.wrap(filter))
|
||||
|
||||
/**
|
||||
* Filter data using [DataFilter] builder
|
||||
*/
|
||||
fun <T : Any> DataNode<T>.filter(filterBuilder: DataFilter.() -> Unit): DataNode<T> =
|
||||
filter(DataFilter.build(filterBuilder))
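// Illustrative sketch of the builder-based filter above; the "raw" source node and
// the ".*\\.dat" pattern are assumed example values.
fun <T : Any> selectRawData(node: DataNode<T>): DataNode<T> = node.filter {
    from = "raw"
    pattern = ".*\\.dat"
}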
|
@ -1,292 +0,0 @@
|
||||
package hep.dataforge.data
|
||||
|
||||
import hep.dataforge.meta.*
|
||||
import hep.dataforge.names.*
|
||||
import kotlinx.coroutines.CoroutineScope
|
||||
import kotlinx.coroutines.Job
|
||||
import kotlinx.coroutines.launch
|
||||
import kotlin.collections.component1
|
||||
import kotlin.collections.component2
|
||||
import kotlin.collections.set
|
||||
import kotlin.reflect.KClass
|
||||
|
||||
sealed class DataItem<out T : Any> : MetaRepr {
|
||||
abstract val type: KClass<out T>
|
||||
|
||||
abstract val meta: Meta
|
||||
|
||||
class Node<out T : Any>(val node: DataNode<T>) : DataItem<T>() {
|
||||
override val type: KClass<out T> get() = node.type
|
||||
|
||||
override fun toMeta(): Meta = node.toMeta()
|
||||
|
||||
override val meta: Meta get() = node.meta
|
||||
}
|
||||
|
||||
class Leaf<out T : Any>(val data: Data<T>) : DataItem<T>() {
|
||||
override val type: KClass<out T> get() = data.type
|
||||
|
||||
override fun toMeta(): Meta = data.toMeta()
|
||||
|
||||
override val meta: Meta get() = data.meta
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A tree-like data structure grouped into the node. All data inside the node must inherit its type
|
||||
*/
|
||||
interface DataNode<out T : Any> : MetaRepr {
|
||||
|
||||
/**
|
||||
* The minimal common ancestor to all data in the node
|
||||
*/
|
||||
val type: KClass<out T>
|
||||
|
||||
val items: Map<NameToken, DataItem<T>>
|
||||
|
||||
val meta: Meta
|
||||
|
||||
override fun toMeta(): Meta = buildMeta {
|
||||
"type" put (type.simpleName ?: "undefined")
|
||||
"items" put {
|
||||
this@DataNode.items.forEach {
|
||||
it.key.toString() put it.value.toMeta()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
companion object {
|
||||
const val TYPE = "dataNode"
|
||||
|
||||
operator fun <T : Any> invoke(type: KClass<out T>, block: DataTreeBuilder<T>.() -> Unit) =
|
||||
DataTreeBuilder(type).apply(block).build()
|
||||
|
||||
inline operator fun <reified T : Any> invoke(noinline block: DataTreeBuilder<T>.() -> Unit) =
|
||||
DataTreeBuilder(T::class).apply(block).build()
|
||||
|
||||
fun <T : Any> builder(type: KClass<out T>) = DataTreeBuilder(type)
|
||||
}
|
||||
}
|
||||
|
||||
val <T : Any> DataItem<T>?.node: DataNode<T>? get() = (this as? DataItem.Node<T>)?.node
|
||||
val <T : Any> DataItem<T>?.data: Data<T>? get() = (this as? DataItem.Leaf<T>)?.data
|
||||
|
||||
/**
|
||||
* Start computation for all goals in data node and return a job for the whole node
|
||||
*/
|
||||
fun DataNode<*>.launchAll(scope: CoroutineScope): Job = scope.launch {
|
||||
items.values.forEach {
|
||||
when (it) {
|
||||
is DataItem.Node<*> -> it.node.launchAll(scope)
|
||||
is DataItem.Leaf<*> -> it.data.start(scope)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
operator fun <T : Any> DataNode<T>.get(name: Name): DataItem<T>? = when (name.length) {
|
||||
0 -> error("Empty name")
|
||||
1 -> items[name.first()]
|
||||
else -> get(name.first()!!.asName()).node?.get(name.cutFirst())
|
||||
}
|
||||
|
||||
operator fun <T : Any> DataNode<T>.get(name: String): DataItem<T>? = get(name.toName())
|
||||
|
||||
/**
|
||||
* Sequence of all children including nodes
|
||||
*/
|
||||
fun <T : Any> DataNode<T>.asSequence(): Sequence<Pair<Name, DataItem<T>>> = sequence {
|
||||
items.forEach { (head, item) ->
|
||||
yield(head.asName() to item)
|
||||
if (item is DataItem.Node) {
|
||||
val subSequence = item.node.asSequence()
|
||||
.map { (name, data) -> (head.asName() + name) to data }
|
||||
yieldAll(subSequence)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sequence of data entries
|
||||
*/
|
||||
fun <T : Any> DataNode<T>.dataSequence(): Sequence<Pair<Name, Data<T>>> = sequence {
|
||||
items.forEach { (head, item) ->
|
||||
when (item) {
|
||||
is DataItem.Leaf -> yield(head.asName() to item.data)
|
||||
is DataItem.Node -> {
|
||||
val subSequence = item.node.dataSequence()
|
||||
.map { (name, data) -> (head.asName() + name) to data }
|
||||
yieldAll(subSequence)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
operator fun <T : Any> DataNode<T>.iterator(): Iterator<Pair<Name, DataItem<T>>> = asSequence().iterator()
|
||||
|
||||
class DataTree<out T : Any> internal constructor(
|
||||
override val type: KClass<out T>,
|
||||
override val items: Map<NameToken, DataItem<T>>,
|
||||
override val meta: Meta
|
||||
) : DataNode<T>
|
||||
|
||||
private sealed class DataTreeBuilderItem<out T : Any> {
|
||||
class Node<T : Any>(val tree: DataTreeBuilder<T>) : DataTreeBuilderItem<T>()
|
||||
class Leaf<T : Any>(val value: Data<T>) : DataTreeBuilderItem<T>()
|
||||
}
|
||||
|
||||
/**
|
||||
* A builder for a DataTree.
|
||||
*/
|
||||
@DFBuilder
|
||||
class DataTreeBuilder<T : Any>(val type: KClass<out T>) {
|
||||
private val map = HashMap<NameToken, DataTreeBuilderItem<T>>()
|
||||
|
||||
private var meta = MetaBuilder()
|
||||
|
||||
operator fun set(token: NameToken, node: DataTreeBuilder<out T>) {
|
||||
if (map.containsKey(token)) error("Tree entry with name $token already exists")
|
||||
map[token] = DataTreeBuilderItem.Node(node)
|
||||
}
|
||||
|
||||
operator fun set(token: NameToken, data: Data<T>) {
|
||||
if (map.containsKey(token)) error("Tree entry with name $token already exists")
|
||||
map[token] = DataTreeBuilderItem.Leaf(data)
|
||||
}
|
||||
|
||||
private fun buildNode(token: NameToken): DataTreeBuilder<T> {
|
||||
return if (!map.containsKey(token)) {
|
||||
DataTreeBuilder(type).also { map[token] = DataTreeBuilderItem.Node(it) }
|
||||
} else {
|
||||
(map[token] as? DataTreeBuilderItem.Node<T> ?: error("The node with name $token is occupied by leaf")).tree
|
||||
}
|
||||
}
|
||||
|
||||
private fun buildNode(name: Name): DataTreeBuilder<T> {
|
||||
return when (name.length) {
|
||||
0 -> this
|
||||
1 -> buildNode(name.first()!!)
|
||||
else -> buildNode(name.first()!!).buildNode(name.cutFirst())
|
||||
}
|
||||
}
|
||||
|
||||
operator fun set(name: Name, data: Data<T>) {
|
||||
when (name.length) {
|
||||
0 -> error("Can't add data with empty name")
|
||||
1 -> set(name.first()!!, data)
|
||||
2 -> buildNode(name.cutLast())[name.last()!!] = data
|
||||
}
|
||||
}
|
||||
|
||||
operator fun set(name: Name, node: DataTreeBuilder<out T>) {
|
||||
when (name.length) {
|
||||
0 -> error("Can't add data with empty name")
|
||||
1 -> set(name.first()!!, node)
|
||||
2 -> buildNode(name.cutLast())[name.last()!!] = node
|
||||
}
|
||||
}
|
||||
|
||||
operator fun set(name: Name, node: DataNode<T>) = set(name, node.builder())
|
||||
|
||||
operator fun set(name: Name, item: DataItem<T>) = when (item) {
|
||||
is DataItem.Node<T> -> set(name, item.node.builder())
|
||||
is DataItem.Leaf<T> -> set(name, item.data)
|
||||
}
|
||||
|
||||
/**
|
||||
* Append data to node
|
||||
*/
|
||||
infix fun String.put(data: Data<T>) = set(toName(), data)
|
||||
|
||||
/**
|
||||
* Append node
|
||||
*/
|
||||
infix fun String.put(node: DataNode<T>) = set(toName(), node)
|
||||
|
||||
infix fun String.put(item: DataItem<T>) = set(toName(), item)
|
||||
|
||||
/**
|
||||
* Build and append node
|
||||
*/
|
||||
infix fun String.put(block: DataTreeBuilder<T>.() -> Unit) = set(toName(), DataTreeBuilder(type).apply(block))
|
||||
|
||||
|
||||
/**
|
||||
* Update data with given node data and meta with node meta.
|
||||
*/
|
||||
fun update(node: DataNode<T>) {
|
||||
node.dataSequence().forEach {
|
||||
//TODO check if the place is occupied
|
||||
this[it.first] = it.second
|
||||
}
|
||||
meta.update(node.meta)
|
||||
}
|
||||
|
||||
fun meta(block: MetaBuilder.() -> Unit) = meta.apply(block)
|
||||
|
||||
fun meta(meta: Meta) {
|
||||
this.meta = meta.builder()
|
||||
}
|
||||
|
||||
fun build(): DataTree<T> {
|
||||
val resMap = map.mapValues { (_, value) ->
|
||||
when (value) {
|
||||
is DataTreeBuilderItem.Leaf -> DataItem.Leaf(value.value)
|
||||
is DataTreeBuilderItem.Node -> DataItem.Node(value.tree.build())
|
||||
}
|
||||
}
|
||||
return DataTree(type, resMap, meta.seal())
|
||||
}
|
||||
}
|
||||
|
||||
fun <T : Any> DataTreeBuilder<T>.datum(name: Name, data: Data<T>) {
|
||||
this[name] = data
|
||||
}
|
||||
|
||||
fun <T : Any> DataTreeBuilder<T>.datum(name: String, data: Data<T>) {
|
||||
this[name.toName()] = data
|
||||
}
|
||||
|
||||
fun <T : Any> DataTreeBuilder<T>.static(name: Name, data: T, meta: Meta = EmptyMeta) {
|
||||
this[name] = Data.static(data, meta)
|
||||
}
|
||||
|
||||
fun <T : Any> DataTreeBuilder<T>.static(name: Name, data: T, block: MetaBuilder.() -> Unit = {}) {
|
||||
this[name] = Data.static(data, buildMeta(block))
|
||||
}
|
||||
|
||||
fun <T : Any> DataTreeBuilder<T>.static(name: String, data: T, block: MetaBuilder.() -> Unit = {}) {
|
||||
this[name.toName()] = Data.static(data, buildMeta(block))
|
||||
}
|
||||
|
||||
fun <T : Any> DataTreeBuilder<T>.node(name: Name, node: DataNode<T>) {
|
||||
this[name] = node
|
||||
}
|
||||
|
||||
fun <T : Any> DataTreeBuilder<T>.node(name: String, node: DataNode<T>) {
|
||||
this[name.toName()] = node
|
||||
}
|
||||
|
||||
inline fun <reified T : Any> DataTreeBuilder<T>.node(name: Name, noinline block: DataTreeBuilder<T>.() -> Unit) {
|
||||
this[name] = DataNode(T::class, block)
|
||||
}
|
||||
|
||||
inline fun <reified T : Any> DataTreeBuilder<T>.node(name: String, noinline block: DataTreeBuilder<T>.() -> Unit) {
|
||||
this[name.toName()] = DataNode(T::class, block)
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a mutable builder from this node. Node content is not changed
|
||||
*/
|
||||
fun <T : Any> DataNode<T>.builder(): DataTreeBuilder<T> = DataTreeBuilder(type).apply {
|
||||
dataSequence().forEach { (name, data) -> this[name] = data }
|
||||
}
|
||||
|
||||
fun <T : Any> DataNode<T>.filter(predicate: (Name, Data<T>) -> Boolean): DataNode<T> = DataNode.invoke(type) {
|
||||
dataSequence().forEach { (name, data) ->
|
||||
if (predicate(name, data)) {
|
||||
this[name] = data
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fun <T : Any> DataNode<T>.first(): Data<T>? = dataSequence().firstOrNull()?.second
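// A short sketch of the DataTreeBuilder DSL above; all names and values are
// illustrative, `static` and `node` are the helper extensions defined in this file.
val sampleTree: DataNode<Int> = DataNode<Int> {
    static("scalars.a", 1)
    static("scalars.b", 2) {
        "comment" put "meta attached to a static value"
    }
    node("derived") {
        static("c", 3)
    }
}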
|
@ -1,117 +0,0 @@
|
||||
package hep.dataforge.data
|
||||
|
||||
import kotlinx.coroutines.*
|
||||
import kotlin.coroutines.CoroutineContext
|
||||
import kotlin.coroutines.EmptyCoroutineContext
|
||||
|
||||
interface Goal<out T> {
|
||||
val dependencies: Collection<Goal<*>>
|
||||
/**
|
||||
* Returns current running coroutine if the goal is started
|
||||
*/
|
||||
val result: Deferred<T>?
|
||||
|
||||
/**
|
||||
* Get ongoing computation or start a new one.
|
||||
* Does not guarantee thread safety. In case of multi-thread access, could create orphan computations.
|
||||
*/
|
||||
fun startAsync(scope: CoroutineScope): Deferred<T>
|
||||
|
||||
suspend fun CoroutineScope.await(): T = startAsync(this).await()
|
||||
|
||||
/**
|
||||
* Reset the computation
|
||||
*/
|
||||
fun reset()
|
||||
|
||||
companion object {
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
fun Goal<*>.start(scope: CoroutineScope): Job = startAsync(scope)
|
||||
|
||||
val Goal<*>.isComplete get() = result?.isCompleted ?: false
|
||||
|
||||
suspend fun <T> Goal<T>.await(scope: CoroutineScope): T = scope.await()
|
||||
|
||||
open class StaticGoal<T>(val value: T) : Goal<T> {
|
||||
override val dependencies: Collection<Goal<*>> get() = emptyList()
|
||||
override val result: Deferred<T> = CompletableDeferred(value)
|
||||
|
||||
override fun startAsync(scope: CoroutineScope): Deferred<T> = result
|
||||
|
||||
override fun reset() {
|
||||
//doNothing
|
||||
}
|
||||
}
|
||||
|
||||
open class DynamicGoal<T>(
|
||||
val coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||
override val dependencies: Collection<Goal<*>> = emptyList(),
|
||||
val block: suspend CoroutineScope.() -> T
|
||||
) : Goal<T> {
|
||||
|
||||
final override var result: Deferred<T>? = null
|
||||
private set
|
||||
|
||||
/**
|
||||
* Get ongoing computation or start a new one.
|
||||
* Does not guarantee thread safety. In case of multi-thread access, could create orphan computations.
|
||||
*/
|
||||
override fun startAsync(scope: CoroutineScope): Deferred<T> {
|
||||
val startedDependencies = this.dependencies.map { goal ->
|
||||
goal.startAsync(scope)
|
||||
}
|
||||
return result ?: scope.async(coroutineContext + CoroutineMonitor() + Dependencies(startedDependencies)) {
|
||||
startedDependencies.forEach { deferred ->
|
||||
deferred.invokeOnCompletion { error ->
|
||||
if (error != null) cancel(CancellationException("Dependency $deferred failed with error: ${error.message}"))
|
||||
}
|
||||
}
|
||||
block()
|
||||
}.also { result = it }
|
||||
}
|
||||
|
||||
/**
|
||||
* Reset the computation
|
||||
*/
|
||||
override fun reset() {
|
||||
result?.cancel()
|
||||
result = null
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a one-to-one goal based on existing goal
|
||||
*/
|
||||
fun <T, R> Goal<T>.map(
|
||||
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||
block: suspend CoroutineScope.(T) -> R
|
||||
): Goal<R> = DynamicGoal(coroutineContext, listOf(this)) {
|
||||
block(await(this))
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a joining goal.
|
||||
*/
|
||||
fun <T, R> Collection<Goal<T>>.reduce(
|
||||
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||
block: suspend CoroutineScope.(Collection<T>) -> R
|
||||
): Goal<R> = DynamicGoal(coroutineContext, this) {
|
||||
block(map { run { it.await(this) } })
|
||||
}
|
||||
|
||||
/**
|
||||
* A joining goal for a map
|
||||
* @param K type of the map key
|
||||
* @param T type of the input goal
|
||||
* @param R type of the result goal
|
||||
*/
|
||||
fun <K, T, R> Map<K, Goal<T>>.reduce(
|
||||
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||
block: suspend CoroutineScope.(Map<K, T>) -> R
|
||||
): Goal<R> = DynamicGoal(coroutineContext, this.values) {
|
||||
block(mapValues { it.value.await(this) })
|
||||
}
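// A usage sketch for the goal graph above; the function name and printed message
// are illustrative. Starting the mapped goal starts its dependency automatically.
fun goalSketch(scope: CoroutineScope) {
    val source: Goal<Int> = StaticGoal(21)
    val doubled: Goal<Int> = source.map { it * 2 }
    scope.launch {
        println("result = ${doubled.await(this)}")
    }
}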
|
||||
|
@ -1,68 +0,0 @@
|
||||
/*
|
||||
* Copyright 2015 Alexander Nozik.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package hep.dataforge.data
|
||||
|
||||
import hep.dataforge.meta.Meta
|
||||
import hep.dataforge.meta.get
|
||||
import hep.dataforge.meta.string
|
||||
|
||||
interface GroupRule {
|
||||
operator fun <T : Any> invoke(node: DataNode<T>): Map<String, DataNode<T>>
|
||||
|
||||
companion object {
|
||||
/**
|
||||
* Create a grouping rule that creates a separate group for each distinct value of the meta
|
||||
* field with name [key]
|
||||
*
|
||||
* @param key
|
||||
* @param defaultTagValue
|
||||
* @return
|
||||
*/
|
||||
fun byValue(key: String, defaultTagValue: String): GroupRule = object :
|
||||
GroupRule {
|
||||
override fun <T : Any> invoke(node: DataNode<T>): Map<String, DataNode<T>> {
|
||||
val map = HashMap<String, DataTreeBuilder<T>>()
|
||||
|
||||
node.dataSequence().forEach { (name, data) ->
|
||||
val tagValue = data.meta[key]?.string ?: defaultTagValue
|
||||
map.getOrPut(tagValue) { DataNode.builder(node.type) }[name] = data
|
||||
}
|
||||
|
||||
return map.mapValues { it.value.build() }
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// @ValueDef(key = "byValue", required = true, info = "The name of annotation value by which grouping should be made")
|
||||
// @ValueDef(
|
||||
// key = "defaultValue",
|
||||
// def = "default",
|
||||
// info = "Default value which should be used for content in which the grouping value is not presented"
|
||||
// )
|
||||
fun byMeta(config: Meta): GroupRule {
|
||||
//TODO expand grouping options
|
||||
return config["byValue"]?.string?.let {
|
||||
byValue(
|
||||
it,
|
||||
config["defaultValue"]?.string ?: "default"
|
||||
)
|
||||
}
|
||||
?: object : GroupRule {
|
||||
override fun <T : Any> invoke(node: DataNode<T>): Map<String, DataNode<T>> = mapOf("" to node)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
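// Illustrative application of the byValue rule above; the "type" meta key and the
// "unknown" default tag are assumed example values.
fun <T : Any> groupByTypeTag(node: DataNode<T>): Map<String, DataNode<T>> =
    GroupRule.byValue("type", "unknown").invoke(node)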
|
@ -1,76 +0,0 @@
|
||||
package hep.dataforge.data
|
||||
|
||||
import hep.dataforge.meta.*
|
||||
import hep.dataforge.names.Name
|
||||
import kotlin.reflect.KClass
|
||||
|
||||
/**
|
||||
* Action environment includes data name, data meta and action configuration meta
|
||||
*/
|
||||
data class ActionEnv(
|
||||
val name: Name,
|
||||
val meta: Meta,
|
||||
val actionMeta: Meta
|
||||
)
|
||||
|
||||
|
||||
/**
|
||||
* Action environment
|
||||
*/
|
||||
@DFBuilder
|
||||
class MapActionBuilder<T, R>(var name: Name, var meta: MetaBuilder, val actionMeta: Meta) {
|
||||
lateinit var result: suspend ActionEnv.(T) -> R
|
||||
|
||||
/**
|
||||
* Calculate the result of goal
|
||||
*/
|
||||
fun result(f: suspend ActionEnv.(T) -> R) {
|
||||
result = f;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class MapAction<T : Any, out R : Any>(
|
||||
val inputType: KClass<out T>,
|
||||
val outputType: KClass<out R>,
|
||||
private val block: MapActionBuilder<T, R>.() -> Unit
|
||||
) : Action<T, R> {
|
||||
|
||||
override fun invoke(node: DataNode<T>, meta: Meta): DataNode<R> {
|
||||
node.ensureType(inputType)
|
||||
|
||||
return DataNode.invoke(outputType) {
|
||||
node.dataSequence().forEach { (name, data) ->
|
||||
/*
|
||||
* Creating a new environment for action using **old** name, old meta and task meta
|
||||
*/
|
||||
val env = ActionEnv(name, data.meta, meta)
|
||||
|
||||
//applying transformation from builder
|
||||
val builder = MapActionBuilder<T, R>(
|
||||
name,
|
||||
data.meta.builder(), // using data meta
|
||||
meta
|
||||
).apply(block)
|
||||
|
||||
//getting new name
|
||||
val newName = builder.name
|
||||
|
||||
//getting new meta
|
||||
val newMeta = builder.meta.seal()
|
||||
|
||||
val newData = data.map(outputType, meta = newMeta) { builder.result(env, it) }
|
||||
//setting the data node
|
||||
this[newName] = newData
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
inline fun <reified T : Any, reified R : Any> DataNode<T>.map(
|
||||
meta: Meta,
|
||||
noinline action: MapActionBuilder<in T, out R>.() -> Unit
|
||||
): DataNode<R> = MapAction(T::class, R::class, action).invoke(this, meta)
|
||||
|
||||
|
||||
|
@ -1,107 +0,0 @@
|
||||
package hep.dataforge.data
|
||||
|
||||
import hep.dataforge.meta.Meta
|
||||
import hep.dataforge.meta.MetaBuilder
|
||||
import hep.dataforge.names.Name
|
||||
import hep.dataforge.names.toName
|
||||
import kotlin.reflect.KClass
|
||||
|
||||
|
||||
class JoinGroup<T : Any, R : Any>(var name: String, internal val node: DataNode<T>) {
|
||||
|
||||
var meta: MetaBuilder = MetaBuilder()
|
||||
|
||||
lateinit var result: suspend ActionEnv.(Map<Name, T>) -> R
|
||||
|
||||
fun result(f: suspend ActionEnv.(Map<Name, T>) -> R) {
|
||||
this.result = f;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
class ReduceGroupBuilder<T : Any, R : Any>(val actionMeta: Meta) {
|
||||
private val groupRules: MutableList<(DataNode<T>) -> List<JoinGroup<T, R>>> = ArrayList();
|
||||
|
||||
/**
|
||||
* introduce grouping by value name
|
||||
*/
|
||||
fun byValue(tag: String, defaultTag: String = "@default", action: JoinGroup<T, R>.() -> Unit) {
|
||||
groupRules += { node ->
|
||||
GroupRule.byValue(tag, defaultTag).invoke(node).map {
|
||||
JoinGroup<T, R>(it.key, it.value).apply(action)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a single fixed group to grouping rules
|
||||
*/
|
||||
fun group(groupName: String, filter: DataFilter, action: JoinGroup<T, R>.() -> Unit) {
|
||||
groupRules += { node ->
|
||||
listOf(
|
||||
JoinGroup<T, R>(groupName, node.filter(filter)).apply(action)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fun group(groupName: String, filter: (Name, Data<T>) -> Boolean, action: JoinGroup<T, R>.() -> Unit) {
|
||||
groupRules += { node ->
|
||||
listOf(
|
||||
JoinGroup<T, R>(groupName, node.filter(filter)).apply(action)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply transformation to the whole node
|
||||
*/
|
||||
fun result(resultName: String, f: suspend ActionEnv.(Map<Name, T>) -> R) {
|
||||
groupRules += { node ->
|
||||
listOf(JoinGroup<T, R>(resultName, node).apply { result(f) })
|
||||
}
|
||||
}
|
||||
|
||||
internal fun buildGroups(input: DataNode<T>): List<JoinGroup<T, R>> {
|
||||
return groupRules.flatMap { it.invoke(input) }
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* The same rules as for KPipe
|
||||
*/
|
||||
class ReduceAction<T : Any, R : Any>(
|
||||
val inputType: KClass<out T>,
|
||||
val outputType: KClass<out R>,
|
||||
private val action: ReduceGroupBuilder<T, R>.() -> Unit
|
||||
) : Action<T, R> {
|
||||
|
||||
override fun invoke(node: DataNode<T>, meta: Meta): DataNode<R> {
|
||||
node.ensureType(inputType)
|
||||
return DataNode.invoke(outputType) {
|
||||
ReduceGroupBuilder<T, R>(meta).apply(action).buildGroups(node).forEach { group ->
|
||||
|
||||
//val laminate = Laminate(group.meta, meta)
|
||||
|
||||
val dataMap = group.node.dataSequence().associate { it }
|
||||
|
||||
val groupName: String = group.name
|
||||
|
||||
val groupMeta = group.meta
|
||||
|
||||
val env = ActionEnv(groupName.toName(), groupMeta, meta)
|
||||
|
||||
val res: DynamicData<R> = dataMap.reduce(
|
||||
outputType,
|
||||
meta = groupMeta
|
||||
) { group.result.invoke(env, it) }
|
||||
|
||||
set(env.name, res)
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
operator fun <T> Map<Name, T>.get(name: String) = get(name.toName())
|
@ -1,64 +0,0 @@
|
||||
package hep.dataforge.data
|
||||
|
||||
import hep.dataforge.meta.Laminate
|
||||
import hep.dataforge.meta.Meta
|
||||
import hep.dataforge.meta.MetaBuilder
|
||||
import hep.dataforge.meta.builder
|
||||
import hep.dataforge.names.Name
|
||||
import hep.dataforge.names.toName
|
||||
import kotlin.collections.set
|
||||
import kotlin.reflect.KClass
|
||||
|
||||
|
||||
class FragmentRule<T : Any, R : Any>(val name: Name, var meta: MetaBuilder) {
|
||||
lateinit var result: suspend (T) -> R
|
||||
|
||||
fun result(f: suspend (T) -> R) {
|
||||
result = f;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class SplitBuilder<T : Any, R : Any>(val name: Name, val meta: Meta) {
|
||||
internal val fragments: MutableMap<Name, FragmentRule<T, R>.() -> Unit> = HashMap()
|
||||
|
||||
/**
|
||||
* Add a new fragment building rule. If the fragment is not defined, its result won't be available even if it is present in the map
|
||||
* @param name the name of a fragment
|
||||
* @param rule the rule to transform fragment name and meta using
|
||||
*/
|
||||
fun fragment(name: String, rule: FragmentRule<T, R>.() -> Unit) {
|
||||
fragments[name.toName()] = rule
|
||||
}
|
||||
}
|
||||
|
||||
class SplitAction<T : Any, R : Any>(
|
||||
val inputType: KClass<out T>,
|
||||
val outputType: KClass<out R>,
|
||||
private val action: SplitBuilder<T, R>.() -> Unit
|
||||
) : Action<T, R> {
|
||||
|
||||
override fun invoke(node: DataNode<T>, meta: Meta): DataNode<R> {
|
||||
node.ensureType(inputType)
|
||||
|
||||
return DataNode.invoke(outputType) {
|
||||
node.dataSequence().forEach { (name, data) ->
|
||||
|
||||
val laminate = Laminate(data.meta, meta)
|
||||
|
||||
val split = SplitBuilder<T, R>(name, data.meta).apply(action)
|
||||
|
||||
|
||||
// apply individual fragment rules to result
|
||||
split.fragments.forEach { (fragmentName, rule) ->
|
||||
val env = FragmentRule<T, R>(fragmentName, laminate.builder())
|
||||
|
||||
rule(env)
|
||||
|
||||
val res = data.map(outputType, meta = env.meta) { env.result(it) }
|
||||
set(env.name, res)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@ -1,73 +0,0 @@
|
||||
package hep.dataforge.data
|
||||
|
||||
import hep.dataforge.meta.Meta
|
||||
import hep.dataforge.names.NameToken
|
||||
import kotlinx.coroutines.CoroutineScope
|
||||
import kotlinx.coroutines.Deferred
|
||||
import kotlin.reflect.KClass
|
||||
|
||||
fun <R : Any, T : R> Data<T>.upcast(type: KClass<out R>): Data<R> {
|
||||
return object : Data<R> by this {
|
||||
override val type: KClass<out R> = type
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Safe upcast a [Data] to a supertype
|
||||
*/
|
||||
inline fun <reified R : Any, T : R> Data<T>.upcast(): Data<R> = upcast(R::class)
|
||||
|
||||
/**
|
||||
* Check if node could be safely cast to given class
|
||||
*/
|
||||
expect fun <R : Any> DataNode<*>.canCast(type: KClass<out R>): Boolean
|
||||
|
||||
/**
|
||||
* Check if data could be safely cast to given class
|
||||
*/
|
||||
expect fun <R : Any> Data<*>.canCast(type: KClass<out R>): Boolean
|
||||
|
||||
fun <R : Any> DataItem<*>.canCast(type: KClass<out R>): Boolean = when (this) {
|
||||
is DataItem.Node -> node.canCast(type)
|
||||
is DataItem.Leaf -> data.canCast(type)
|
||||
}
|
||||
|
||||
/**
|
||||
* Unsafe cast of data node
|
||||
*/
|
||||
@Suppress("UNCHECKED_CAST")
|
||||
fun <R : Any> Data<*>.cast(type: KClass<out R>): Data<R> {
|
||||
return object : Data<R> {
|
||||
override val meta: Meta get() = this@cast.meta
|
||||
override val dependencies: Collection<Goal<*>> get() = this@cast.dependencies
|
||||
override val result: Deferred<R>? get() = this@cast.result as Deferred<R>
|
||||
override fun startAsync(scope: CoroutineScope): Deferred<R> = this@cast.startAsync(scope) as Deferred<R>
|
||||
override fun reset() = this@cast.reset()
|
||||
override val type: KClass<out R> = type
|
||||
}
|
||||
}
|
||||
|
||||
inline fun <reified R : Any> Data<*>.cast(): Data<R> = cast(R::class)
|
||||
|
||||
@Suppress("UNCHECKED_CAST")
|
||||
fun <R : Any> DataNode<*>.cast(type: KClass<out R>): DataNode<R> {
|
||||
return object : DataNode<R> {
|
||||
override val meta: Meta get() = this@cast.meta
|
||||
override val type: KClass<out R> = type
|
||||
override val items: Map<NameToken, DataItem<R>> get() = this@cast.items as Map<NameToken, DataItem<R>>
|
||||
}
|
||||
}
|
||||
|
||||
inline fun <reified R : Any> DataNode<*>.cast(): DataNode<R> = cast(R::class)
|
||||
|
||||
/**
|
||||
* Check that node is compatible with given type meaning that each element could be cast to the type
|
||||
*/
|
||||
fun <T : Any> DataNode<*>.ensureType(type: KClass<out T>) {
|
||||
if (!canCast(type)) {
|
||||
error("$type expected, but $type received")
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
//expect fun <T : Any, R : Any> DataNode<T>.cast(type: KClass<out R>): DataNode<R>
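// Illustrative use of the safe upcast above: widening a Data<Int> to Data<Number>.
fun widen(data: Data<Int>): Data<Number> = data.upcast()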
|
@ -0,0 +1,65 @@
|
||||
package space.kscience.dataforge.actions
|
||||
|
||||
import kotlinx.coroutines.launch
|
||||
import space.kscience.dataforge.data.*
|
||||
import space.kscience.dataforge.meta.Meta
|
||||
import space.kscience.dataforge.misc.DFInternal
|
||||
import space.kscience.dataforge.names.Name
|
||||
import space.kscience.dataforge.names.startsWith
|
||||
import kotlin.reflect.KType
|
||||
|
||||
/**
|
||||
* Remove all values with keys starting with [name]
|
||||
*/
|
||||
internal fun MutableMap<Name, *>.removeWhatStartsWith(name: Name) {
|
||||
val toRemove = keys.filter { it.startsWith(name) }
|
||||
toRemove.forEach(::remove)
|
||||
}
|
||||
|
||||
/**
|
||||
* An action that caches results on-demand and recalculates them on source push
|
||||
*/
|
||||
public abstract class AbstractAction<in T : Any, R : Any>(
|
||||
public val outputType: KType,
|
||||
) : Action<T, R> {
|
||||
|
||||
/**
|
||||
* Generate initial content of the output
|
||||
*/
|
||||
protected abstract fun DataSetBuilder<R>.generate(
|
||||
data: DataSet<T>,
|
||||
meta: Meta,
|
||||
)
|
||||
|
||||
/**
|
||||
* Update part of the data set when given [updateKey] is triggered by the source
|
||||
*/
|
||||
protected open fun DataSourceBuilder<R>.update(
|
||||
dataSet: DataSet<T>,
|
||||
meta: Meta,
|
||||
updateKey: Name,
|
||||
) {
|
||||
// By default, recalculate the whole dataset
|
||||
generate(dataSet, meta)
|
||||
}
|
||||
|
||||
@OptIn(DFInternal::class)
|
||||
override fun execute(
|
||||
dataSet: DataSet<T>,
|
||||
meta: Meta,
|
||||
): DataSet<R> = if (dataSet is DataSource) {
|
||||
DataSource(outputType, dataSet){
|
||||
generate(dataSet, meta)
|
||||
|
||||
launch {
|
||||
dataSet.updates.collect { name ->
|
||||
update(dataSet, meta, name)
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
DataTree<R>(outputType) {
|
||||
generate(dataSet, meta)
|
||||
}
|
||||
}
|
||||
}
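// A sketch of a concrete action built on AbstractAction; the IdentityAction name is
// illustrative. It simply republishes every item of the input data set unchanged.
internal class IdentityAction<T : Any>(type: KType) : AbstractAction<T, T>(type) {
    override fun DataSetBuilder<T>.generate(data: DataSet<T>, meta: Meta) {
        data.forEach { item -> data(item.name, item.data) }
    }
}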
|
@ -0,0 +1,40 @@
|
||||
package space.kscience.dataforge.actions
|
||||
|
||||
import space.kscience.dataforge.data.DataSet
|
||||
import space.kscience.dataforge.meta.Meta
|
||||
import space.kscience.dataforge.misc.DFExperimental
|
||||
|
||||
/**
|
||||
* A simple data transformation on a data node. Actions should avoid doing actual dependency evaluation in [execute].
|
||||
*/
|
||||
public interface Action<in T : Any, out R : Any> {
|
||||
|
||||
/**
|
||||
* Transform the data in the node, producing a new node. By default, it is assumed that all calculations are lazy
|
||||
* so no actual computation is started at this moment.
|
||||
*/
|
||||
public fun execute(dataSet: DataSet<T>, meta: Meta = Meta.EMPTY): DataSet<R>
|
||||
|
||||
public companion object
|
||||
}
|
||||
|
||||
/**
|
||||
* Action composition. The resulting action applies this action first and then [action] to its output
|
||||
*/
|
||||
public infix fun <T : Any, I : Any, R : Any> Action<T, I>.then(action: Action<I, R>): Action<T, R> {
|
||||
// TODO introduce composite action and add optimize by adding action to the list
|
||||
return object : Action<T, R> {
|
||||
|
||||
override fun execute(
|
||||
dataSet: DataSet<T>,
|
||||
meta: Meta,
|
||||
): DataSet<R> = action.execute(this@then.execute(dataSet, meta), meta)
|
||||
}
|
||||
}
|
||||
|
||||
@DFExperimental
|
||||
public operator fun <T : Any, R : Any> Action<T, R>.invoke(
|
||||
dataSet: DataSet<T>,
|
||||
meta: Meta = Meta.EMPTY,
|
||||
): DataSet<R> = execute(dataSet, meta)
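// A hedged composition sketch: `normalize` and `histogram` stand for arbitrary
// user-defined actions and are not part of the library.
@OptIn(DFExperimental::class)
public fun runPipeline(
    normalize: Action<Double, Double>,
    histogram: Action<Double, Int>,
    input: DataSet<Double>,
): DataSet<Int> {
    val pipeline: Action<Double, Int> = normalize then histogram
    return pipeline(input) // same as pipeline.execute(input, Meta.EMPTY)
}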
|
||||
|
@ -0,0 +1,106 @@
|
||||
package space.kscience.dataforge.actions
|
||||
|
||||
import space.kscience.dataforge.data.*
|
||||
import space.kscience.dataforge.meta.Meta
|
||||
import space.kscience.dataforge.meta.MutableMeta
|
||||
import space.kscience.dataforge.meta.seal
|
||||
import space.kscience.dataforge.meta.toMutableMeta
|
||||
import space.kscience.dataforge.misc.DFBuilder
|
||||
import space.kscience.dataforge.misc.DFExperimental
|
||||
import space.kscience.dataforge.misc.DFInternal
|
||||
import space.kscience.dataforge.names.Name
|
||||
import kotlin.reflect.KType
|
||||
import kotlin.reflect.typeOf
|
||||
|
||||
/**
|
||||
* Action environment includes data name, data meta and action configuration meta
|
||||
*/
|
||||
public data class ActionEnv(
|
||||
val name: Name,
|
||||
val meta: Meta,
|
||||
val actionMeta: Meta,
|
||||
)
|
||||
|
||||
/**
|
||||
* Action environment
|
||||
*/
|
||||
@DFBuilder
|
||||
public class MapActionBuilder<T, R>(
|
||||
public var name: Name,
|
||||
public var meta: MutableMeta,
|
||||
public val actionMeta: Meta,
|
||||
@PublishedApi internal var outputType: KType,
|
||||
) {
|
||||
|
||||
public lateinit var result: suspend ActionEnv.(T) -> R
|
||||
|
||||
/**
|
||||
* Set unsafe [outputType] for the resulting data. Be sure that it is correct.
|
||||
*/
|
||||
public fun <R1 : R> result(outputType: KType, f: suspend ActionEnv.(T) -> R1) {
|
||||
this.outputType = outputType
|
||||
result = f;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate the result of goal
|
||||
*/
|
||||
public inline fun <reified R1 : R> result(noinline f: suspend ActionEnv.(T) -> R1) {
|
||||
outputType = typeOf<R1>()
|
||||
result = f;
|
||||
}
|
||||
}
|
||||
|
||||
@PublishedApi
|
||||
internal class MapAction<in T : Any, R : Any>(
|
||||
outputType: KType,
|
||||
private val block: MapActionBuilder<T, R>.() -> Unit,
|
||||
) : AbstractAction<T, R>(outputType) {
|
||||
|
||||
private fun DataSetBuilder<R>.mapOne(name: Name, data: Data<T>, meta: Meta) {
|
||||
// Creating a new environment for action using **old** name, old meta and task meta
|
||||
val env = ActionEnv(name, data.meta, meta)
|
||||
|
||||
//applying transformation from builder
|
||||
val builder = MapActionBuilder<T, R>(
|
||||
name,
|
||||
data.meta.toMutableMeta(), // using data meta
|
||||
meta,
|
||||
outputType
|
||||
).apply(block)
|
||||
|
||||
//getting new name
|
||||
val newName = builder.name
|
||||
|
||||
//getting new meta
|
||||
val newMeta = builder.meta.seal()
|
||||
|
||||
@OptIn(DFInternal::class)
|
||||
val newData = Data(builder.outputType, newMeta, dependencies = listOf(data)) {
|
||||
builder.result(env, data.await())
|
||||
}
|
||||
//setting the data node
|
||||
data(newName, newData)
|
||||
}
|
||||
|
||||
override fun DataSetBuilder<R>.generate(data: DataSet<T>, meta: Meta) {
|
||||
data.forEach { mapOne(it.name, it.data, meta) }
|
||||
}
|
||||
|
||||
override fun DataSourceBuilder<R>.update(dataSet: DataSet<T>, meta: Meta, updateKey: Name) {
|
||||
remove(updateKey)
|
||||
dataSet[updateKey]?.let { mapOne(updateKey, it, meta) }
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* A one-to-one mapping action
|
||||
*/
|
||||
@DFExperimental
|
||||
@Suppress("FunctionName")
|
||||
public inline fun <T : Any, reified R : Any> Action.Companion.map(
|
||||
noinline builder: MapActionBuilder<T, R>.() -> Unit,
|
||||
): Action<T, R> = MapAction(typeOf<R>(), builder)
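// Illustrative use of the map factory above: a one-to-one action that doubles every
// Int item; the property name is an example only.
@OptIn(DFExperimental::class)
public val doubleAll: Action<Int, Int> = Action.map<Int, Int> {
    result { it * 2 }
}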
|
||||
|
||||
|
@ -0,0 +1,117 @@
|
||||
package space.kscience.dataforge.actions
|
||||
|
||||
import space.kscience.dataforge.data.*
|
||||
import space.kscience.dataforge.meta.Meta
|
||||
import space.kscience.dataforge.meta.MutableMeta
|
||||
import space.kscience.dataforge.misc.DFBuilder
|
||||
import space.kscience.dataforge.misc.DFExperimental
|
||||
import space.kscience.dataforge.misc.DFInternal
|
||||
import space.kscience.dataforge.names.Name
|
||||
import space.kscience.dataforge.names.parseAsName
|
||||
import kotlin.reflect.KType
|
||||
import kotlin.reflect.typeOf
|
||||
|
||||
|
||||
public class JoinGroup<T : Any, R : Any>(
|
||||
public var name: String,
|
||||
internal val set: DataSet<T>,
|
||||
@PublishedApi internal var outputType: KType,
|
||||
) {
|
||||
|
||||
public var meta: MutableMeta = MutableMeta()
|
||||
|
||||
public lateinit var result: suspend ActionEnv.(Map<Name, ValueWithMeta<T>>) -> R
|
||||
|
||||
internal fun <R1 : R> result(outputType: KType, f: suspend ActionEnv.(Map<Name, ValueWithMeta<T>>) -> R1) {
|
||||
this.outputType = outputType
|
||||
this.result = f;
|
||||
}
|
||||
|
||||
public inline fun <reified R1 : R> result(noinline f: suspend ActionEnv.(Map<Name, ValueWithMeta<T>>) -> R1) {
|
||||
outputType = typeOf<R1>()
|
||||
this.result = f;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@DFBuilder
|
||||
public class ReduceGroupBuilder<T : Any, R : Any>(
|
||||
public val actionMeta: Meta,
|
||||
private val outputType: KType,
|
||||
) {
|
||||
private val groupRules: MutableList<(DataSet<T>) -> List<JoinGroup<T, R>>> = ArrayList();
|
||||
|
||||
/**
|
||||
* Introduce grouping by the value of the [tag] meta field
|
||||
*/
|
||||
public fun byValue(tag: String, defaultTag: String = "@default", action: JoinGroup<T, R>.() -> Unit) {
|
||||
groupRules += { node ->
|
||||
GroupRule.byMetaValue(tag, defaultTag).gather(node).map {
|
||||
JoinGroup<T, R>(it.key, it.value, outputType).apply(action)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public fun group(
|
||||
groupName: String,
|
||||
predicate: (Name, Meta) -> Boolean,
|
||||
action: JoinGroup<T, R>.() -> Unit,
|
||||
) {
|
||||
groupRules += { source ->
|
||||
listOf(
|
||||
JoinGroup<T, R>(groupName, source.filter(predicate), outputType).apply(action)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply transformation to the whole node
|
||||
*/
|
||||
public fun result(resultName: String, f: suspend ActionEnv.(Map<Name, ValueWithMeta<T>>) -> R) {
|
||||
groupRules += { node ->
|
||||
listOf(JoinGroup<T, R>(resultName, node, outputType).apply { result(outputType, f) })
|
||||
}
|
||||
}
|
||||
|
||||
internal fun buildGroups(input: DataSet<T>): List<JoinGroup<T, R>> =
|
||||
groupRules.flatMap { it.invoke(input) }
|
||||
|
||||
}
|
||||
|
||||
@PublishedApi
|
||||
internal class ReduceAction<T : Any, R : Any>(
|
||||
outputType: KType,
|
||||
private val action: ReduceGroupBuilder<T, R>.() -> Unit,
|
||||
) : AbstractAction<T, R>(outputType) {
|
||||
//TODO optimize reduction. Currently, the whole action recalculates on push
|
||||
|
||||
override fun DataSetBuilder<R>.generate(data: DataSet<T>, meta: Meta) {
|
||||
ReduceGroupBuilder<T, R>(meta, outputType).apply(action).buildGroups(data).forEach { group ->
|
||||
val dataFlow: Map<Name, Data<T>> = group.set.asSequence().fold(HashMap()) { acc, value ->
|
||||
acc.apply {
|
||||
acc[value.name] = value.data
|
||||
}
|
||||
}
|
||||
|
||||
val groupName: String = group.name
|
||||
|
||||
val groupMeta = group.meta
|
||||
|
||||
val env = ActionEnv(groupName.parseAsName(), groupMeta, meta)
|
||||
@OptIn(DFInternal::class) val res: Data<R> = dataFlow.reduceToData(
|
||||
group.outputType,
|
||||
meta = groupMeta
|
||||
) { group.result.invoke(env, it) }
|
||||
|
||||
data(env.name, res)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* An action that groups the input data and reduces each group to a single value
|
||||
*/
|
||||
@DFExperimental
|
||||
public inline fun <reified T : Any, reified R : Any> Action.Companion.reduce(
|
||||
noinline builder: ReduceGroupBuilder<T, R>.() -> Unit,
|
||||
): Action<T, R> = ReduceAction(typeOf<R>(), builder)
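// Illustrative use of the reduce factory above: count the items in each group defined
// by the "group" meta value; the key and the property name are assumed examples.
@OptIn(DFExperimental::class)
public val countByGroup: Action<Int, Int> = Action.reduce<Int, Int> {
    byValue("group") {
        // the block receives the grouped items keyed by name; here we simply count them
        result { items -> items.size }
    }
}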
|
@ -0,0 +1,92 @@
|
||||
package space.kscience.dataforge.actions
|
||||
|
||||
import space.kscience.dataforge.data.*
|
||||
import space.kscience.dataforge.meta.Laminate
|
||||
import space.kscience.dataforge.meta.Meta
|
||||
import space.kscience.dataforge.meta.MutableMeta
|
||||
import space.kscience.dataforge.meta.toMutableMeta
|
||||
import space.kscience.dataforge.misc.DFExperimental
|
||||
import space.kscience.dataforge.names.Name
|
||||
import space.kscience.dataforge.names.parseAsName
|
||||
import kotlin.collections.set
|
||||
import kotlin.reflect.KType
|
||||
import kotlin.reflect.typeOf
|
||||
|
||||
|
||||
public class SplitBuilder<T : Any, R : Any>(public val name: Name, public val meta: Meta) {
|
||||
|
||||
public class FragmentRule<T : Any, R : Any>(
|
||||
public val name: Name,
|
||||
public var meta: MutableMeta,
|
||||
@PublishedApi internal var outputType: KType,
|
||||
) {
|
||||
public lateinit var result: suspend (T) -> R
|
||||
|
||||
public inline fun <reified R1 : R> result(noinline f: suspend (T) -> R1) {
|
||||
this.outputType = typeOf<R1>()
|
||||
result = f;
|
||||
}
|
||||
}
|
||||
|
||||
internal val fragments: MutableMap<Name, FragmentRule<T, R>.() -> Unit> = HashMap()
|
||||
|
||||
/**
|
||||
* Add a new fragment building rule. If the fragment is not defined, its result won't be available even if it is present in the map
|
||||
* @param name the name of a fragment
|
||||
* @param rule the rule to transform fragment name and meta using
|
||||
*/
|
||||
public fun fragment(name: String, rule: FragmentRule<T, R>.() -> Unit) {
|
||||
fragments[name.parseAsName()] = rule
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Action that splits each incoming element into a number of fragments defined in the builder
|
||||
*/
|
||||
@PublishedApi
|
||||
internal class SplitAction<T : Any, R : Any>(
|
||||
outputType: KType,
|
||||
private val action: SplitBuilder<T, R>.() -> Unit,
|
||||
) : AbstractAction<T, R>(outputType) {
|
||||
|
||||
private fun DataSetBuilder<R>.splitOne(name: Name, data: Data<T>, meta: Meta) {
|
||||
val laminate = Laminate(data.meta, meta)
|
||||
|
||||
val split = SplitBuilder<T, R>(name, data.meta).apply(action)
|
||||
|
||||
|
||||
// apply individual fragment rules to result
|
||||
split.fragments.forEach { (fragmentName, rule) ->
|
||||
val env = SplitBuilder.FragmentRule<T, R>(
|
||||
fragmentName,
|
||||
laminate.toMutableMeta(),
|
||||
outputType
|
||||
).apply(rule)
|
||||
//data.map<R>(outputType, meta = env.meta) { env.result(it) }.named(fragmentName)
|
||||
|
||||
data(
|
||||
fragmentName,
|
||||
@Suppress("OPT_IN_USAGE") Data(outputType, meta = env.meta, dependencies = listOf(data)) {
|
||||
env.result(data.await())
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
override fun DataSetBuilder<R>.generate(data: DataSet<T>, meta: Meta) {
|
||||
data.forEach { splitOne(it.name, it.data, meta) }
|
||||
}
|
||||
|
||||
override fun DataSourceBuilder<R>.update(dataSet: DataSet<T>, meta: Meta, updateKey: Name) {
|
||||
remove(updateKey)
|
||||
dataSet[updateKey]?.let { splitOne(updateKey, it, meta) }
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Action that splits each incoming element into a number of fragments defined in the builder
|
||||
*/
|
||||
@DFExperimental
|
||||
public inline fun <T : Any, reified R : Any> Action.Companion.split(
|
||||
noinline builder: SplitBuilder<T, R>.() -> Unit,
|
||||
): Action<T, R> = SplitAction(typeOf<R>(), builder)
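// Illustrative use of the split factory above: every Int item is fanned out into two
// fragments named "half" and "double"; the fragment names are arbitrary examples.
@OptIn(DFExperimental::class)
public val fanOut: Action<Int, Int> = Action.split<Int, Int> {
    fragment("half") { result { it / 2 } }
    fragment("double") { result { it * 2 } }
}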
|
@@ -0,0 +1,57 @@
package space.kscience.dataforge.data

import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Job
import space.kscience.dataforge.misc.DFExperimental
import kotlin.coroutines.CoroutineContext


/**
 * A monitor of goal state that could be accessed only from inside the goal
 */
@DFExperimental
public class CoroutineMonitor : CoroutineContext.Element {
    override val key: CoroutineContext.Key<*> get() = CoroutineMonitor

    public var totalWork: Double = 1.0
    public var workDone: Double = 0.0
    public var status: String = ""

    /**
     * Mark the goal as started
     */
    public fun start() {

    }

    /**
     * Mark the goal as completed
     */
    public fun finish() {
        workDone = totalWork
    }

    public companion object : CoroutineContext.Key<CoroutineMonitor>
}

public class Dependencies(public val values: Collection<Job>) : CoroutineContext.Element {
    override val key: CoroutineContext.Key<*> get() = Dependencies

    public companion object : CoroutineContext.Key<Dependencies>
}

@DFExperimental
public val CoroutineContext.monitor: CoroutineMonitor? get() = this[CoroutineMonitor]

@DFExperimental
public val CoroutineScope.monitor: CoroutineMonitor? get() = coroutineContext.monitor

public val Job.dependencies: Collection<Job> get() = this[Dependencies]?.values ?: emptyList()

@DFExperimental
public val Job.totalWork: Double get() = dependencies.sumOf { totalWork } + (monitor?.totalWork ?: 0.0)

@DFExperimental
public val Job.workDone: Double get() = dependencies.sumOf { workDone } + (monitor?.workDone ?: 0.0)

@DFExperimental
public val Job.status: String get() = monitor?.status ?: ""

@DFExperimental
public val Job.progress: Double get() = workDone / totalWork
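// Illustrative sketch (not part of the changeset): inside a goal computation the monitor, if present
// in the coroutine context, can be used to report progress; the numbers here are made up.
@OptIn(DFExperimental::class)
suspend fun reportProgress() {
    kotlin.coroutines.coroutineContext.monitor?.apply {
        totalWork = 100.0
        workDone = 42.0
        status = "processing"
    }
}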
@ -0,0 +1,107 @@
|
||||
package space.kscience.dataforge.data
|
||||
|
||||
import kotlinx.coroutines.*
|
||||
import space.kscience.dataforge.meta.Meta
|
||||
import space.kscience.dataforge.meta.MetaRepr
|
||||
import space.kscience.dataforge.meta.isEmpty
|
||||
import space.kscience.dataforge.misc.DFInternal
|
||||
import space.kscience.dataforge.misc.DfId
|
||||
import kotlin.coroutines.CoroutineContext
|
||||
import kotlin.coroutines.EmptyCoroutineContext
|
||||
import kotlin.reflect.KType
|
||||
import kotlin.reflect.typeOf
|
||||
|
||||
/**
|
||||
* A data element characterized by its meta
|
||||
*/
|
||||
@DfId(Data.TYPE)
|
||||
public interface Data<out T> : Goal<T>, MetaRepr {
|
||||
/**
|
||||
* Type marker for the data. The type is known before the calculation takes place so it could be checked.
|
||||
*/
|
||||
public val type: KType
|
||||
|
||||
/**
|
||||
* Meta for the data
|
||||
*/
|
||||
public val meta: Meta
|
||||
|
||||
override fun toMeta(): Meta = Meta {
|
||||
"type" put (type.toString())
|
||||
if (!meta.isEmpty()) {
|
||||
"meta" put meta
|
||||
}
|
||||
}
|
||||
|
||||
public companion object {
|
||||
public const val TYPE: String = "data"
|
||||
|
||||
/**
|
||||
* The type that can't have any subtypes
|
||||
*/
|
||||
internal val TYPE_OF_NOTHING: KType = typeOf<Unit>()
|
||||
|
||||
public inline fun <reified T : Any> static(
|
||||
value: T,
|
||||
meta: Meta = Meta.EMPTY,
|
||||
): Data<T> = StaticData(typeOf<T>(), value, meta)
|
||||
|
||||
/**
|
||||
* An empty data containing only meta
|
||||
*/
|
||||
@OptIn(DelicateCoroutinesApi::class)
|
||||
public fun empty(meta: Meta): Data<Nothing> = object : Data<Nothing> {
|
||||
override val type: KType = TYPE_OF_NOTHING
|
||||
override val meta: Meta = meta
|
||||
override val dependencies: Collection<Goal<*>> = emptyList()
|
||||
override val deferred: Deferred<Nothing>
|
||||
get() = GlobalScope.async(start = CoroutineStart.LAZY) {
|
||||
error("The Data is empty and could not be computed")
|
||||
}
|
||||
|
||||
override fun async(coroutineScope: CoroutineScope): Deferred<Nothing> = deferred
|
||||
override fun reset() {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A lazily computed variant of [Data] based on [LazyGoal]
|
||||
* One must ensure that a proper [type] is provided, so this constructor should not be used directly
|
||||
*/
|
||||
private class LazyData<T : Any>(
|
||||
override val type: KType,
|
||||
override val meta: Meta = Meta.EMPTY,
|
||||
additionalContext: CoroutineContext = EmptyCoroutineContext,
|
||||
dependencies: Collection<Goal<*>> = emptyList(),
|
||||
block: suspend () -> T,
|
||||
) : Data<T>, LazyGoal<T>(additionalContext, dependencies, block)
|
||||
|
||||
public class StaticData<T : Any>(
|
||||
override val type: KType,
|
||||
value: T,
|
||||
override val meta: Meta = Meta.EMPTY,
|
||||
) : Data<T>, StaticGoal<T>(value)
|
||||
|
||||
@Suppress("FunctionName")
|
||||
public inline fun <reified T : Any> Data(value: T, meta: Meta = Meta.EMPTY): StaticData<T> =
|
||||
StaticData(typeOf<T>(), value, meta)
|
||||
|
||||
@Suppress("FunctionName")
|
||||
@DFInternal
|
||||
public fun <T : Any> Data(
|
||||
type: KType,
|
||||
meta: Meta = Meta.EMPTY,
|
||||
context: CoroutineContext = EmptyCoroutineContext,
|
||||
dependencies: Collection<Goal<*>> = emptyList(),
|
||||
block: suspend () -> T,
|
||||
): Data<T> = LazyData(type, meta, context, dependencies, block)
|
||||
|
||||
@OptIn(DFInternal::class)
|
||||
@Suppress("FunctionName")
|
||||
public inline fun <reified T : Any> Data(
|
||||
meta: Meta = Meta.EMPTY,
|
||||
context: CoroutineContext = EmptyCoroutineContext,
|
||||
dependencies: Collection<Goal<*>> = emptyList(),
|
||||
noinline block: suspend () -> T,
|
||||
): Data<T> = Data(typeOf<T>(), meta, context, dependencies, block)
|
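// Illustrative sketch (not part of the changeset): static data wraps an existing value, while the
// Data {} builder creates a lazily computed value with explicit dependencies.
val answer: Data<Int> = Data.static(42)
val doubled: Data<Int> = Data(dependencies = listOf(answer)) { answer.await() * 2 }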
@ -0,0 +1,124 @@
|
||||
package space.kscience.dataforge.data
|
||||
|
||||
import kotlinx.coroutines.*
|
||||
import kotlinx.coroutines.flow.Flow
|
||||
import kotlinx.coroutines.flow.emptyFlow
|
||||
import kotlinx.coroutines.flow.mapNotNull
|
||||
import space.kscience.dataforge.data.Data.Companion.TYPE_OF_NOTHING
|
||||
import space.kscience.dataforge.meta.Meta
|
||||
import space.kscience.dataforge.names.Name
|
||||
import space.kscience.dataforge.names.asName
|
||||
import space.kscience.dataforge.names.endsWith
|
||||
import space.kscience.dataforge.names.parseAsName
|
||||
import kotlin.reflect.KType
|
||||
|
||||
public interface DataSet<out T : Any> {
|
||||
|
||||
/**
|
||||
* The minimal common ancestor to all data in the node
|
||||
*/
|
||||
public val dataType: KType
|
||||
|
||||
/**
|
||||
* Meta-data associated with this node. If no meta is provided, returns [Meta.EMPTY].
|
||||
*/
|
||||
public val meta: Meta
|
||||
|
||||
/**
|
||||
* Traverse this [DataSet] returning named data instances. The order is not guaranteed.
|
||||
*/
|
||||
public operator fun iterator(): Iterator<NamedData<T>>
|
||||
|
||||
/**
|
||||
* Get data with given name.
|
||||
*/
|
||||
public operator fun get(name: Name): Data<T>?
|
||||
|
||||
public companion object {
|
||||
public val META_KEY: Name = "@meta".asName()
|
||||
|
||||
/**
|
||||
* An empty [DataSet] that suits all types
|
||||
*/
|
||||
public val EMPTY: DataSet<Nothing> = object : DataSet<Nothing> {
|
||||
override val dataType: KType = TYPE_OF_NOTHING
|
||||
override val meta: Meta get() = Meta.EMPTY
|
||||
|
||||
override fun iterator(): Iterator<NamedData<Nothing>> = emptySequence<NamedData<Nothing>>().iterator()
|
||||
|
||||
override fun get(name: Name): Data<Nothing>? = null
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public fun <T : Any> DataSet<T>.asSequence(): Sequence<NamedData<T>> = object : Sequence<NamedData<T>> {
|
||||
override fun iterator(): Iterator<NamedData<T>> = this@asSequence.iterator()
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the single [Data] in this [DataSet]. Throws an error if it is not single.
|
||||
*/
|
||||
public fun <T : Any> DataSet<T>.single(): NamedData<T> = asSequence().single()
|
||||
|
||||
public fun <T : Any> DataSet<T>.asIterable(): Iterable<NamedData<T>> = object : Iterable<NamedData<T>> {
|
||||
override fun iterator(): Iterator<NamedData<T>> = this@asIterable.iterator()
|
||||
}
|
||||
|
||||
public operator fun <T : Any> DataSet<T>.get(name: String): Data<T>? = get(name.parseAsName())
|
||||
|
||||
/**
|
||||
* A [DataSet] with propagated updates.
|
||||
*/
|
||||
public interface DataSource<out T : Any> : DataSet<T>, CoroutineScope {
|
||||
|
||||
/**
|
||||
* A flow of updated item names. Updates are propagated as a [Flow] of names of updated nodes.
* Updates can include new data items as well as replacements of existing ones. A replaced item may
* change its content completely, so it should be pulled again.
|
||||
*
|
||||
*/
|
||||
public val updates: Flow<Name>
|
||||
|
||||
/**
|
||||
* Stop generating updates from this [DataSource]
|
||||
*/
|
||||
public fun close() {
|
||||
coroutineContext[Job]?.cancel()
|
||||
}
|
||||
}
|
||||
|
||||
public val <T : Any> DataSet<T>.updates: Flow<Name> get() = if (this is DataSource) updates else emptyFlow()
|
||||
//
|
||||
///**
|
||||
// * Flow all data nodes with names starting with [branchName]
|
||||
// */
|
||||
//public fun <T : Any> DataSet<T>.children(branchName: Name): Sequence<NamedData<T>> =
|
||||
// this@children.asSequence().filter {
|
||||
// it.name.startsWith(branchName)
|
||||
// }
|
||||
|
||||
/**
|
||||
* Start computation for all goals in data node and return a job for the whole node
|
||||
*/
|
||||
public fun <T : Any> DataSet<T>.startAll(coroutineScope: CoroutineScope): Job = coroutineScope.launch {
|
||||
asIterable().map {
|
||||
it.launch(this@launch)
|
||||
}.joinAll()
|
||||
}
|
||||
|
||||
public suspend fun <T : Any> DataSet<T>.computeAndJoinAll(): Unit = coroutineScope { startAll(this).join() }
|
||||
|
||||
public fun DataSet<*>.toMeta(): Meta = Meta {
|
||||
forEach {
|
||||
if (it.name.endsWith(DataSet.META_KEY)) {
|
||||
set(it.name, it.meta)
|
||||
} else {
|
||||
it.name put {
|
||||
"type" put it.type.toString()
|
||||
"meta" put it.meta
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public val <T : Any> DataSet<T>.updatesWithData: Flow<NamedData<T>> get() = updates.mapNotNull { get(it)?.named(it) }
|
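// Illustrative sketch (not part of the changeset): a DataSet is traversed with its iterator or
// queried by name; the item name below is hypothetical.
suspend fun printAll(dataSet: DataSet<Int>) {
    dataSet.forEach { item -> println("${item.name}: meta=${item.meta}") }
    println(dataSet["some.item"]?.await())   // String-based get defined above
}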
@ -0,0 +1,165 @@
|
||||
package space.kscience.dataforge.data
|
||||
|
||||
import space.kscience.dataforge.meta.Meta
|
||||
import space.kscience.dataforge.meta.MutableMeta
|
||||
import space.kscience.dataforge.misc.DFExperimental
|
||||
import space.kscience.dataforge.names.Name
|
||||
import space.kscience.dataforge.names.isEmpty
|
||||
import space.kscience.dataforge.names.plus
|
||||
import kotlin.reflect.KType
|
||||
|
||||
public interface DataSetBuilder<in T : Any> {
|
||||
public val dataType: KType
|
||||
|
||||
/**
|
||||
* Remove all data items starting with [name]
|
||||
*/
|
||||
public fun remove(name: Name)
|
||||
|
||||
public fun data(name: Name, data: Data<T>?)
|
||||
|
||||
/**
|
||||
* Set the current state of the given [dataSet] into branch [name]. Does not propagate subsequent updates.
|
||||
*/
|
||||
public fun node(name: Name, dataSet: DataSet<T>) {
|
||||
//remove previous items
|
||||
if (name != Name.EMPTY) {
|
||||
remove(name)
|
||||
}
|
||||
|
||||
//Set new items
|
||||
dataSet.forEach {
|
||||
data(name + it.name, it.data)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set meta for the given node
|
||||
*/
|
||||
public fun meta(name: Name, meta: Meta)
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Define meta in this [DataSet]
|
||||
*/
|
||||
public fun <T : Any> DataSetBuilder<T>.meta(value: Meta): Unit = meta(Name.EMPTY, value)
|
||||
|
||||
/**
|
||||
* Define meta in this [DataSet]
|
||||
*/
|
||||
public fun <T : Any> DataSetBuilder<T>.meta(mutableMeta: MutableMeta.() -> Unit): Unit = meta(Meta(mutableMeta))
|
||||
|
||||
@PublishedApi
|
||||
internal class SubSetBuilder<in T : Any>(
|
||||
private val parent: DataSetBuilder<T>,
|
||||
private val branch: Name,
|
||||
) : DataSetBuilder<T> {
|
||||
override val dataType: KType get() = parent.dataType
|
||||
|
||||
override fun remove(name: Name) {
|
||||
parent.remove(branch + name)
|
||||
}
|
||||
|
||||
override fun data(name: Name, data: Data<T>?) {
|
||||
parent.data(branch + name, data)
|
||||
}
|
||||
|
||||
override fun node(name: Name, dataSet: DataSet<T>) {
|
||||
parent.node(branch + name, dataSet)
|
||||
}
|
||||
|
||||
override fun meta(name: Name, meta: Meta) {
|
||||
parent.meta(branch + name, meta)
|
||||
}
|
||||
}
|
||||
|
||||
public inline fun <T : Any> DataSetBuilder<T>.node(
|
||||
name: Name,
|
||||
crossinline block: DataSetBuilder<T>.() -> Unit,
|
||||
) {
|
||||
if (name.isEmpty()) block() else SubSetBuilder(this, name).block()
|
||||
}
|
||||
|
||||
|
||||
public fun <T : Any> DataSetBuilder<T>.data(name: String, value: Data<T>) {
|
||||
data(Name.parse(name), value)
|
||||
}
|
||||
|
||||
public fun <T : Any> DataSetBuilder<T>.node(name: String, set: DataSet<T>) {
|
||||
node(Name.parse(name), set)
|
||||
}
|
||||
|
||||
public inline fun <T : Any> DataSetBuilder<T>.node(
|
||||
name: String,
|
||||
crossinline block: DataSetBuilder<T>.() -> Unit,
|
||||
): Unit = node(Name.parse(name), block)
|
||||
|
||||
public fun <T : Any> DataSetBuilder<T>.set(value: NamedData<T>) {
|
||||
data(value.name, value.data)
|
||||
}
|
||||
|
||||
/**
|
||||
* Produce lazy [Data] and emit it into the [DataSetBuilder]
|
||||
*/
|
||||
public inline fun <reified T : Any> DataSetBuilder<T>.produce(
|
||||
name: String,
|
||||
meta: Meta = Meta.EMPTY,
|
||||
noinline producer: suspend () -> T,
|
||||
) {
|
||||
val data = Data(meta, block = producer)
|
||||
data(name, data)
|
||||
}
|
||||
|
||||
public inline fun <reified T : Any> DataSetBuilder<T>.produce(
|
||||
name: Name,
|
||||
meta: Meta = Meta.EMPTY,
|
||||
noinline producer: suspend () -> T,
|
||||
) {
|
||||
val data = Data(meta, block = producer)
|
||||
data(name, data)
|
||||
}
|
||||
|
||||
/**
|
||||
* Emit a static data with the fixed value
|
||||
*/
|
||||
public inline fun <reified T : Any> DataSetBuilder<T>.static(
|
||||
name: String,
|
||||
data: T,
|
||||
meta: Meta = Meta.EMPTY,
|
||||
): Unit = data(name, Data.static(data, meta))
|
||||
|
||||
public inline fun <reified T : Any> DataSetBuilder<T>.static(
|
||||
name: Name,
|
||||
data: T,
|
||||
meta: Meta = Meta.EMPTY,
|
||||
): Unit = data(name, Data.static(data, meta))
|
||||
|
||||
public inline fun <reified T : Any> DataSetBuilder<T>.static(
|
||||
name: String,
|
||||
data: T,
|
||||
mutableMeta: MutableMeta.() -> Unit,
|
||||
): Unit = data(Name.parse(name), Data.static(data, Meta(mutableMeta)))
|
||||
|
||||
/**
|
||||
* Update data with given node data and meta with node meta.
|
||||
*/
|
||||
@DFExperimental
|
||||
public fun <T : Any> DataSetBuilder<T>.populateFrom(tree: DataSet<T>): Unit {
|
||||
tree.forEach {
|
||||
//TODO check if the place is occupied
|
||||
data(it.name, it.data)
|
||||
}
|
||||
}
|
||||
|
||||
//public fun <T : Any> DataSetBuilder<T>.populateFrom(flow: Flow<NamedData<T>>) {
|
||||
// flow.collect {
|
||||
// data(it.name, it.data)
|
||||
// }
|
||||
//}
|
||||
|
||||
public fun <T : Any> DataSetBuilder<T>.populateFrom(sequence: Sequence<NamedData<T>>) {
|
||||
sequence.forEach {
|
||||
data(it.name, it.data)
|
||||
}
|
||||
}
|
@ -0,0 +1,119 @@
|
||||
package space.kscience.dataforge.data
|
||||
|
||||
import space.kscience.dataforge.meta.Meta
|
||||
import space.kscience.dataforge.misc.DFInternal
|
||||
import space.kscience.dataforge.misc.DfId
|
||||
import space.kscience.dataforge.names.*
|
||||
import kotlin.collections.component1
|
||||
import kotlin.collections.component2
|
||||
import kotlin.reflect.KType
|
||||
import kotlin.reflect.typeOf
|
||||
|
||||
public sealed class DataTreeItem<out T : Any> {
|
||||
|
||||
public abstract val meta: Meta
|
||||
|
||||
public class Node<out T : Any>(public val tree: DataTree<T>) : DataTreeItem<T>() {
|
||||
override val meta: Meta get() = tree.meta
|
||||
}
|
||||
|
||||
public class Leaf<out T : Any>(public val data: Data<T>) : DataTreeItem<T>() {
|
||||
override val meta: Meta get() = data.meta
|
||||
}
|
||||
}
|
||||
|
||||
public val <T : Any> DataTreeItem<T>.type: KType
|
||||
get() = when (this) {
|
||||
is DataTreeItem.Node -> tree.dataType
|
||||
is DataTreeItem.Leaf -> data.type
|
||||
}
|
||||
|
||||
/**
|
||||
* A tree-like [DataSet] grouped into the node. All data inside the node must inherit its type
|
||||
*/
|
||||
@DfId(DataTree.TYPE)
|
||||
public interface DataTree<out T : Any> : DataSet<T> {
|
||||
|
||||
/**
|
||||
* Top-level children items of this [DataTree]
|
||||
*/
|
||||
public val items: Map<NameToken, DataTreeItem<T>>
|
||||
|
||||
override val meta: Meta get() = items[META_ITEM_NAME_TOKEN]?.meta ?: Meta.EMPTY
|
||||
|
||||
override fun iterator(): Iterator<NamedData<T>> = iterator {
|
||||
items.forEach { (token, childItem: DataTreeItem<T>) ->
|
||||
if (!token.body.startsWith("@")) {
|
||||
when (childItem) {
|
||||
is DataTreeItem.Leaf -> yield(childItem.data.named(token.asName()))
|
||||
is DataTreeItem.Node -> yieldAll(childItem.tree.asSequence().map { it.named(token + it.name) })
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
override fun get(name: Name): Data<T>? = when (name.length) {
|
||||
0 -> null
|
||||
1 -> items[name.firstOrNull()!!].data
|
||||
else -> items[name.firstOrNull()!!].tree?.get(name.cutFirst())
|
||||
}
|
||||
|
||||
public companion object {
|
||||
public const val TYPE: String = "dataTree"
|
||||
|
||||
/**
|
||||
* A name token used to designate tree node meta
|
||||
*/
|
||||
public val META_ITEM_NAME_TOKEN: NameToken = NameToken("@meta")
|
||||
|
||||
@DFInternal
|
||||
public fun <T : Any> emptyWithType(type: KType, meta: Meta = Meta.EMPTY): DataTree<T> = object : DataTree<T> {
|
||||
override val items: Map<NameToken, DataTreeItem<T>> get() = emptyMap()
|
||||
override val dataType: KType get() = type
|
||||
override val meta: Meta get() = meta
|
||||
}
|
||||
|
||||
@OptIn(DFInternal::class)
|
||||
public inline fun <reified T : Any> empty(meta: Meta = Meta.EMPTY): DataTree<T> =
|
||||
emptyWithType<T>(typeOf<T>(), meta)
|
||||
}
|
||||
}
|
||||
|
||||
public fun <T : Any> DataTree<T>.listChildren(prefix: Name): List<Name> =
|
||||
getItem(prefix).tree?.items?.keys?.map { prefix + it } ?: emptyList()
|
||||
|
||||
/**
|
||||
* Get a [DataTreeItem] with given [name] or null if the item does not exist
|
||||
*/
|
||||
public tailrec fun <T : Any> DataTree<T>.getItem(name: Name): DataTreeItem<T>? = when (name.length) {
|
||||
0 -> DataTreeItem.Node(this)
|
||||
1 -> items[name.firstOrNull()]
|
||||
else -> items[name.firstOrNull()!!].tree?.getItem(name.cutFirst())
|
||||
}
|
||||
|
||||
public val <T : Any> DataTreeItem<T>?.tree: DataTree<T>? get() = (this as? DataTreeItem.Node<T>)?.tree
|
||||
public val <T : Any> DataTreeItem<T>?.data: Data<T>? get() = (this as? DataTreeItem.Leaf<T>)?.data
|
||||
|
||||
/**
|
||||
* A [Sequence] of all children including nodes
|
||||
*/
|
||||
public fun <T : Any> DataTree<T>.traverseItems(): Sequence<Pair<Name, DataTreeItem<T>>> = sequence {
|
||||
items.forEach { (head, item) ->
|
||||
yield(head.asName() to item)
|
||||
if (item is DataTreeItem.Node) {
|
||||
val subSequence = item.tree.traverseItems()
|
||||
.map { (name, data) -> (head.asName() + name) to data }
|
||||
yieldAll(subSequence)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a branch of this [DataTree] with a given [branchName].
|
||||
* The difference from the similar method for [DataSet] is that the internal logic is simpler and the return value is a [DataTree].
|
||||
*/
|
||||
@OptIn(DFInternal::class)
|
||||
public fun <T : Any> DataTree<T>.branch(branchName: Name): DataTree<T> =
|
||||
getItem(branchName)?.tree ?: DataTree.emptyWithType(dataType)
|
||||
|
||||
public fun <T : Any> DataTree<T>.branch(branchName: String): DataTree<T> = branch(branchName.parseAsName())
|
@ -0,0 +1,127 @@
|
||||
package space.kscience.dataforge.data
|
||||
|
||||
import kotlinx.coroutines.CoroutineScope
|
||||
import kotlinx.coroutines.Job
|
||||
import kotlinx.coroutines.flow.MutableSharedFlow
|
||||
import kotlinx.coroutines.launch
|
||||
import space.kscience.dataforge.meta.Meta
|
||||
import space.kscience.dataforge.misc.DFInternal
|
||||
import space.kscience.dataforge.misc.ThreadSafe
|
||||
import space.kscience.dataforge.names.*
|
||||
import kotlin.collections.set
|
||||
import kotlin.coroutines.CoroutineContext
|
||||
import kotlin.coroutines.coroutineContext
|
||||
import kotlin.reflect.KType
|
||||
import kotlin.reflect.typeOf
|
||||
|
||||
public interface DataSourceBuilder<T : Any> : DataSetBuilder<T>, DataSource<T> {
|
||||
override val updates: MutableSharedFlow<Name>
|
||||
}
|
||||
|
||||
/**
|
||||
* A mutable [DataTree] that propagates updates
|
||||
*/
|
||||
public class DataTreeBuilder<T : Any>(
|
||||
override val dataType: KType,
|
||||
coroutineContext: CoroutineContext,
|
||||
) : DataTree<T>, DataSourceBuilder<T> {
|
||||
|
||||
override val coroutineContext: CoroutineContext =
|
||||
coroutineContext + Job(coroutineContext[Job]) + GoalExecutionRestriction()
|
||||
|
||||
private val treeItems = HashMap<NameToken, DataTreeItem<T>>()
|
||||
|
||||
override val items: Map<NameToken, DataTreeItem<T>>
|
||||
get() = treeItems.filter { !it.key.body.startsWith("@") }
|
||||
|
||||
override val updates: MutableSharedFlow<Name> = MutableSharedFlow<Name>()
|
||||
|
||||
@ThreadSafe
|
||||
private fun remove(token: NameToken) {
|
||||
if (treeItems.remove(token) != null) {
|
||||
launch {
|
||||
updates.emit(token.asName())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
override fun remove(name: Name) {
|
||||
if (name.isEmpty()) error("Can't remove the root node")
|
||||
(getItem(name.cutLast()).tree as? DataTreeBuilder)?.remove(name.lastOrNull()!!)
|
||||
}
|
||||
|
||||
@ThreadSafe
|
||||
private fun set(token: NameToken, data: Data<T>) {
|
||||
treeItems[token] = DataTreeItem.Leaf(data)
|
||||
}
|
||||
|
||||
@ThreadSafe
|
||||
private fun set(token: NameToken, node: DataTree<T>) {
|
||||
treeItems[token] = DataTreeItem.Node(node)
|
||||
}
|
||||
|
||||
private fun getOrCreateNode(token: NameToken): DataTreeBuilder<T> =
|
||||
(treeItems[token] as? DataTreeItem.Node<T>)?.tree as? DataTreeBuilder<T>
|
||||
?: DataTreeBuilder<T>(dataType, coroutineContext).also { set(token, it) }
|
||||
|
||||
private fun getOrCreateNode(name: Name): DataTreeBuilder<T> = when (name.length) {
|
||||
0 -> this
|
||||
1 -> getOrCreateNode(name.firstOrNull()!!)
|
||||
else -> getOrCreateNode(name.firstOrNull()!!).getOrCreateNode(name.cutFirst())
|
||||
}
|
||||
|
||||
override fun data(name: Name, data: Data<T>?) {
|
||||
if (data == null) {
|
||||
remove(name)
|
||||
} else {
|
||||
when (name.length) {
|
||||
0 -> error("Can't add data with empty name")
|
||||
1 -> set(name.firstOrNull()!!, data)
|
||||
else -> getOrCreateNode(name.cutLast()).set(name.lastOrNull()!!, data)
|
||||
}
|
||||
}
|
||||
launch {
|
||||
updates.emit(name)
|
||||
}
|
||||
}
|
||||
|
||||
override fun meta(name: Name, meta: Meta) {
|
||||
val item = getItem(name)
|
||||
if (item is DataTreeItem.Leaf) error("TODO: Can't change meta of existing leaf item.")
|
||||
data(name + DataTree.META_ITEM_NAME_TOKEN, Data.empty(meta))
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a dynamic [DataSource]. Initial data is placed synchronously.
|
||||
*/
|
||||
@DFInternal
|
||||
@Suppress("FunctionName")
|
||||
public fun <T : Any> DataSource(
|
||||
type: KType,
|
||||
parent: CoroutineScope,
|
||||
block: DataSourceBuilder<T>.() -> Unit,
|
||||
): DataTreeBuilder<T> = DataTreeBuilder<T>(type, parent.coroutineContext).apply(block)
|
||||
|
||||
@Suppress("OPT_IN_USAGE", "FunctionName")
|
||||
public inline fun <reified T : Any> DataSource(
|
||||
parent: CoroutineScope,
|
||||
crossinline block: DataSourceBuilder<T>.() -> Unit,
|
||||
): DataTreeBuilder<T> = DataSource(typeOf<T>(), parent) { block() }
|
||||
|
||||
@Suppress("FunctionName")
|
||||
public suspend inline fun <reified T : Any> DataSource(
|
||||
crossinline block: DataSourceBuilder<T>.() -> Unit = {},
|
||||
): DataTreeBuilder<T> = DataTreeBuilder<T>(typeOf<T>(), coroutineContext).apply { block() }
|
||||
|
||||
public inline fun <reified T : Any> DataSourceBuilder<T>.emit(
|
||||
name: Name,
|
||||
parent: CoroutineScope,
|
||||
noinline block: DataSourceBuilder<T>.() -> Unit,
|
||||
): Unit = node(name, DataSource(parent, block))
|
||||
|
||||
public inline fun <reified T : Any> DataSourceBuilder<T>.emit(
|
||||
name: String,
|
||||
parent: CoroutineScope,
|
||||
noinline block: DataSourceBuilder<T>.() -> Unit,
|
||||
): Unit = node(Name.parse(name), DataSource(parent, block))
|
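// Illustrative sketch (not part of the changeset): a DataSource is a mutable, observable DataTree
// bound to a coroutine scope; the item names are hypothetical.
fun CoroutineScope.buildSource(): DataTreeBuilder<Int> = DataSource<Int>(this) {
    static("a", 1)
    static("b", 2)
    // later calls to data(...) on the returned builder are emitted through its `updates` flow
}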
@@ -0,0 +1,112 @@
package space.kscience.dataforge.data

import kotlinx.coroutines.*
import space.kscience.dataforge.misc.DFExperimental
import kotlin.coroutines.CoroutineContext
import kotlin.coroutines.EmptyCoroutineContext

/**
 * A lazy computation result with its dependencies, allowing to start computing the dependencies ahead of time
 */
public interface Goal<out T> {
    public val dependencies: Collection<Goal<*>>

    /**
     * Returns the currently running coroutine if the goal is started. Null if the computation is not started.
     */
    public val deferred: Deferred<T>?

    /**
     * Get the ongoing computation or start a new one.
     * Does not guarantee thread safety. In case of multi-thread access, could create orphan computations.
     *
     * If the computation is already running, the scope is not used.
     */
    public fun async(coroutineScope: CoroutineScope): Deferred<T>

    /**
     * Reset the computation
     */
    public fun reset()

    public companion object
}

public fun Goal<*>.launch(coroutineScope: CoroutineScope): Job = async(coroutineScope)

public suspend fun <T> Goal<T>.await(): T = coroutineScope { async(this).await() }

public val Goal<*>.isComplete: Boolean get() = deferred?.isCompleted ?: false

public open class StaticGoal<T>(public val value: T) : Goal<T> {
    override val dependencies: Collection<Goal<*>> get() = emptyList()
    override val deferred: Deferred<T> = CompletableDeferred(value)

    override fun async(coroutineScope: CoroutineScope): Deferred<T> = deferred

    override fun reset() {
        //do nothing
    }
}

/**
 * @param coroutineContext additional context information
 */
public open class LazyGoal<T>(
    private val coroutineContext: CoroutineContext = EmptyCoroutineContext,
    override val dependencies: Collection<Goal<*>> = emptyList(),
    public val block: suspend () -> T,
) : Goal<T> {

    final override var deferred: Deferred<T>? = null
        private set

    /**
     * Get the ongoing computation or start a new one.
     * Does not guarantee thread safety. In case of multi-thread access, could create orphan computations.
     * If [GoalExecutionRestriction] is present in the [coroutineScope] context, the call could produce an error
     * or a warning depending on the restriction policy.
     */
    @OptIn(DFExperimental::class)
    override fun async(coroutineScope: CoroutineScope): Deferred<T> {
        val log = coroutineScope.coroutineContext[GoalLogger]
        // Check if the context restricts goal computation
        coroutineScope.coroutineContext[GoalExecutionRestriction]?.let { restriction ->
            when (restriction.policy) {
                GoalExecutionRestrictionPolicy.WARNING -> log?.emit(GoalLogger.WARNING_TAG) { "Goal eager execution is prohibited by the coroutine scope policy" }
                GoalExecutionRestrictionPolicy.ERROR -> error("Goal eager execution is prohibited by the coroutine scope policy")
                else -> {
                    /*do nothing*/
                }
            }
        }

        log?.emit { "Starting dependencies computation for ${this@LazyGoal}" }
        val startedDependencies = this.dependencies.map { goal ->
            goal.run { async(coroutineScope) }
        }
        return deferred ?: coroutineScope.async(
            coroutineContext
                    + CoroutineMonitor()
                    + Dependencies(startedDependencies)
                    + GoalExecutionRestriction(GoalExecutionRestrictionPolicy.NONE) // Remove restrictions on goal execution
        ) {
            //cancel execution if an error is encountered in one of the dependencies
            startedDependencies.forEach { deferred ->
                deferred.invokeOnCompletion { error ->
                    if (error != null) this.cancel(CancellationException("Dependency $deferred failed with error: ${error.message}"))
                }
            }
            coroutineContext[GoalLogger]?.emit { "Starting computation of ${this@LazyGoal}" }
            block()
        }.also { deferred = it }
    }

    /**
     * Reset the computation
     */
    override fun reset() {
        deferred?.cancel()
        deferred = null
    }
}
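// Illustrative sketch (not part of the changeset): a LazyGoal does nothing until awaited; await()
// starts the computation together with its dependencies in the calling coroutine scope.
suspend fun goalExample(): Int {
    val first = LazyGoal { 21 }
    val second = LazyGoal(dependencies = listOf(first)) { first.await() * 2 }
    return second.await() // starts `first`, then computes `second`
}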
@@ -0,0 +1,31 @@
package space.kscience.dataforge.data

import kotlin.coroutines.CoroutineContext

public enum class GoalExecutionRestrictionPolicy {
    /**
     * Allow eager execution
     */
    NONE,

    /**
     * Give a warning on eager execution
     */
    WARNING,

    /**
     * Throw an error on eager execution
     */
    ERROR
}

/**
 * A special coroutine context key that allows or disallows goal execution during configuration time (eager execution).
 */
public class GoalExecutionRestriction(
    public val policy: GoalExecutionRestrictionPolicy = GoalExecutionRestrictionPolicy.ERROR,
) : CoroutineContext.Element {
    override val key: CoroutineContext.Key<*> get() = Companion

    public companion object : CoroutineContext.Key<GoalExecutionRestriction>
}
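// Illustrative sketch (not part of the changeset, assuming kotlinx.coroutines imports): placing the
// restriction element into a scope forbids eager goal execution started from that scope.
val configurationScope = CoroutineScope(
    Dispatchers.Default + GoalExecutionRestriction(GoalExecutionRestrictionPolicy.ERROR)
)
// LazyGoal.async(configurationScope) would now fail with "Goal eager execution is prohibited..."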
@@ -0,0 +1,16 @@
package space.kscience.dataforge.data

import kotlin.coroutines.CoroutineContext

/**
 * A coroutine context element that provides logging capabilities
 */
public interface GoalLogger : CoroutineContext.Element {
    override val key: CoroutineContext.Key<*> get() = GoalLogger

    public fun emit(vararg tags: String, message: suspend () -> String)

    public companion object : CoroutineContext.Key<GoalLogger> {
        public const val WARNING_TAG: String = "WARNING"
    }
}
@ -0,0 +1,78 @@
|
||||
/*
|
||||
* Copyright 2015 Alexander Nozik.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package space.kscience.dataforge.data
|
||||
|
||||
import kotlinx.coroutines.launch
|
||||
import space.kscience.dataforge.meta.get
|
||||
import space.kscience.dataforge.meta.string
|
||||
import space.kscience.dataforge.misc.DFInternal
|
||||
|
||||
public interface GroupRule {
|
||||
public fun <T : Any> gather(set: DataSet<T>): Map<String, DataSet<T>>
|
||||
|
||||
public companion object {
|
||||
/**
|
||||
* Create a grouping rule that creates groups for different values of the meta
* field with name [key]
|
||||
*
|
||||
* @param key
|
||||
* @param defaultTagValue
|
||||
* @return
|
||||
*/
|
||||
@OptIn(DFInternal::class)
|
||||
public fun byMetaValue(
|
||||
key: String,
|
||||
defaultTagValue: String,
|
||||
): GroupRule = object : GroupRule {
|
||||
|
||||
override fun <T : Any> gather(
|
||||
set: DataSet<T>,
|
||||
): Map<String, DataSet<T>> {
|
||||
val map = HashMap<String, DataSet<T>>()
|
||||
|
||||
if (set is DataSource) {
|
||||
set.forEach { data ->
|
||||
val tagValue: String = data.meta[key]?.string ?: defaultTagValue
|
||||
(map.getOrPut(tagValue) { DataTreeBuilder(set.dataType, set.coroutineContext) } as DataTreeBuilder<T>)
|
||||
.data(data.name, data.data)
|
||||
|
||||
set.launch {
|
||||
set.updates.collect { name ->
|
||||
val dataUpdate = set[name]
|
||||
|
||||
val updateTagValue = dataUpdate?.meta?.get(key)?.string ?: defaultTagValue
|
||||
map.getOrPut(updateTagValue) {
|
||||
DataSource(set.dataType, this) {
|
||||
data(name, dataUpdate)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
set.forEach { data ->
|
||||
val tagValue: String = data.meta[key]?.string ?: defaultTagValue
|
||||
(map.getOrPut(tagValue) { StaticDataTree(set.dataType) } as StaticDataTree<T>)
|
||||
.data(data.name, data.data)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return map
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@@ -0,0 +1,35 @@
package space.kscience.dataforge.data

import space.kscience.dataforge.meta.isEmpty
import space.kscience.dataforge.misc.Named
import space.kscience.dataforge.names.Name

public interface NamedData<out T : Any> : Named, Data<T> {
    override val name: Name
    public val data: Data<T>
}

public operator fun NamedData<*>.component1(): Name = name
public operator fun <T : Any> NamedData<T>.component2(): Data<T> = data

private class NamedDataImpl<out T : Any>(
    override val name: Name,
    override val data: Data<T>,
) : Data<T> by data, NamedData<T> {
    override fun toString(): String = buildString {
        append("NamedData(name=\"$name\"")
        if (data is StaticData) {
            append(", value=${data.value}")
        }
        if (!data.meta.isEmpty()) {
            append(", meta=${data.meta}")
        }
        append(")")
    }
}

public fun <T : Any> Data<T>.named(name: Name): NamedData<T> = if (this is NamedData) {
    NamedDataImpl(name, this.data)
} else {
    NamedDataImpl(name, this)
}
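// Illustrative sketch (not part of the changeset): attaching a name to existing data.
val item: NamedData<String> = Data.static("payload").named("folder.item".parseAsName())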
@ -0,0 +1,82 @@
|
||||
package space.kscience.dataforge.data
|
||||
|
||||
import space.kscience.dataforge.meta.Meta
|
||||
import space.kscience.dataforge.misc.DFExperimental
|
||||
import space.kscience.dataforge.names.*
|
||||
import kotlin.reflect.KType
|
||||
import kotlin.reflect.typeOf
|
||||
|
||||
@PublishedApi
|
||||
internal class StaticDataTree<T : Any>(
|
||||
override val dataType: KType,
|
||||
) : DataSetBuilder<T>, DataTree<T> {
|
||||
|
||||
private val _items: MutableMap<NameToken, DataTreeItem<T>> = HashMap()
|
||||
|
||||
override val items: Map<NameToken, DataTreeItem<T>>
|
||||
get() = _items.filter { !it.key.body.startsWith("@") }
|
||||
|
||||
override fun remove(name: Name) {
|
||||
when (name.length) {
|
||||
0 -> error("Can't remove root tree node")
|
||||
1 -> _items.remove(name.firstOrNull()!!)
|
||||
else -> (_items[name.firstOrNull()!!].tree as? StaticDataTree<T>)?.remove(name.cutFirst())
|
||||
}
|
||||
}
|
||||
|
||||
private fun getOrCreateNode(name: Name): StaticDataTree<T> = when (name.length) {
|
||||
0 -> this
|
||||
1 -> {
|
||||
val itemName = name.firstOrNull()!!
|
||||
(_items[itemName].tree as? StaticDataTree<T>) ?: StaticDataTree<T>(dataType).also {
|
||||
_items[itemName] = DataTreeItem.Node(it)
|
||||
}
|
||||
}
|
||||
else -> getOrCreateNode(name.cutLast()).getOrCreateNode(name.lastOrNull()!!.asName())
|
||||
}
|
||||
|
||||
private fun set(name: Name, item: DataTreeItem<T>?) {
|
||||
if (name.isEmpty()) error("Can't set top level tree node")
|
||||
if (item == null) {
|
||||
remove(name)
|
||||
} else {
|
||||
getOrCreateNode(name.cutLast())._items[name.lastOrNull()!!] = item
|
||||
}
|
||||
}
|
||||
|
||||
override fun data(name: Name, data: Data<T>?) {
|
||||
set(name, data?.let { DataTreeItem.Leaf(it) })
|
||||
}
|
||||
|
||||
override fun node(name: Name, dataSet: DataSet<T>) {
|
||||
if (dataSet is StaticDataTree) {
|
||||
set(name, DataTreeItem.Node(dataSet))
|
||||
} else {
|
||||
dataSet.forEach {
|
||||
data(name + it.name, it.data)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
override fun meta(name: Name, meta: Meta) {
|
||||
val item = getItem(name)
|
||||
if (item is DataTreeItem.Leaf) TODO("Can't change meta of existing leaf item.")
|
||||
data(name + DataTree.META_ITEM_NAME_TOKEN, Data.empty(meta))
|
||||
}
|
||||
}
|
||||
|
||||
@Suppress("FunctionName")
|
||||
public inline fun <T : Any> DataTree(
|
||||
dataType: KType,
|
||||
block: DataSetBuilder<T>.() -> Unit,
|
||||
): DataTree<T> = StaticDataTree<T>(dataType).apply { block() }
|
||||
|
||||
@Suppress("FunctionName")
|
||||
public inline fun <reified T : Any> DataTree(
|
||||
noinline block: DataSetBuilder<T>.() -> Unit,
|
||||
): DataTree<T> = DataTree(typeOf<T>(), block)
|
||||
|
||||
@OptIn(DFExperimental::class)
|
||||
public fun <T : Any> DataSet<T>.seal(): DataTree<T> = DataTree(dataType) {
|
||||
populateFrom(this@seal)
|
||||
}
|
@@ -0,0 +1,105 @@
package space.kscience.dataforge.data

import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.filter
import kotlinx.coroutines.flow.map
import kotlinx.coroutines.flow.mapNotNull
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.*
import kotlin.coroutines.CoroutineContext
import kotlin.coroutines.EmptyCoroutineContext
import kotlin.reflect.KType


/**
 * A stateless filtered [DataSet]
 */
public fun <T : Any> DataSet<T>.filter(
    predicate: (Name, Meta) -> Boolean,
): DataSource<T> = object : DataSource<T> {

    override val dataType: KType get() = this@filter.dataType

    override val coroutineContext: CoroutineContext
        get() = (this@filter as? DataSource)?.coroutineContext ?: EmptyCoroutineContext

    override val meta: Meta get() = this@filter.meta

    override fun iterator(): Iterator<NamedData<T>> = iterator {
        for (d in this@filter) {
            if (predicate(d.name, d.meta)) {
                yield(d)
            }
        }
    }

    override fun get(name: Name): Data<T>? = this@filter.get(name)?.takeIf {
        predicate(name, it.meta)
    }

    override val updates: Flow<Name> = this@filter.updates.filter flowFilter@{ name ->
        val theData = this@filter[name] ?: return@flowFilter false
        predicate(name, theData.meta)
    }
}

/**
 * Generate a wrapper data set with a given name prefix appended to all names
 */
public fun <T : Any> DataSet<T>.withNamePrefix(prefix: Name): DataSet<T> = if (prefix.isEmpty()) {
    this
} else object : DataSource<T> {

    override val dataType: KType get() = this@withNamePrefix.dataType

    override val coroutineContext: CoroutineContext
        get() = (this@withNamePrefix as? DataSource)?.coroutineContext ?: EmptyCoroutineContext

    override val meta: Meta get() = this@withNamePrefix.meta

    override fun iterator(): Iterator<NamedData<T>> = iterator {
        for (d in this@withNamePrefix) {
            yield(d.data.named(prefix + d.name))
        }
    }

    override fun get(name: Name): Data<T>? =
        name.removeFirstOrNull(prefix)?.let { this@withNamePrefix.get(it) }

    override val updates: Flow<Name> get() = this@withNamePrefix.updates.map { prefix + it }
}

/**
 * Get a subset of data starting with a given [branchName]
 */
public fun <T : Any> DataSet<T>.branch(branchName: Name): DataSet<T> = if (branchName.isEmpty()) {
    this
} else object : DataSource<T> {
    override val dataType: KType get() = this@branch.dataType

    override val coroutineContext: CoroutineContext
        get() = (this@branch as? DataSource)?.coroutineContext ?: EmptyCoroutineContext

    override val meta: Meta get() = this@branch.meta

    override fun iterator(): Iterator<NamedData<T>> = iterator {
        for (d in this@branch) {
            d.name.removeFirstOrNull(branchName)?.let { name ->
                yield(d.data.named(name))
            }
        }
    }

    override fun get(name: Name): Data<T>? = this@branch.get(branchName + name)

    override val updates: Flow<Name> get() = this@branch.updates.mapNotNull { it.removeFirstOrNull(branchName) }
}

public fun <T : Any> DataSet<T>.branch(branchName: String): DataSet<T> = this@branch.branch(branchName.parseAsName())

@DFExperimental
public suspend fun <T : Any> DataSet<T>.rootData(): Data<T>? = get(Name.EMPTY)
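// Illustrative sketch (not part of the changeset): both wrappers are lazy views over the same data:
// `branch` strips a common name prefix, `withNamePrefix` adds one.
fun views(all: DataSet<Int>): Pair<DataSet<Int>, DataSet<Int>> {
    val calibration = all.branch("calibration")        // "calibration.a" is visible as "a"
    val raw = all.withNamePrefix("raw".asName())        // "a" is visible as "raw.a"
    return calibration to raw
}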
@ -0,0 +1,221 @@
|
||||
package space.kscience.dataforge.data
|
||||
|
||||
import space.kscience.dataforge.meta.Meta
|
||||
import space.kscience.dataforge.meta.MutableMeta
|
||||
import space.kscience.dataforge.meta.seal
|
||||
import space.kscience.dataforge.meta.toMutableMeta
|
||||
import space.kscience.dataforge.misc.DFInternal
|
||||
import space.kscience.dataforge.names.Name
|
||||
import kotlin.coroutines.CoroutineContext
|
||||
import kotlin.coroutines.EmptyCoroutineContext
|
||||
import kotlin.reflect.KType
|
||||
import kotlin.reflect.typeOf
|
||||
|
||||
public data class ValueWithMeta<T>(val meta: Meta, val value: T)
|
||||
|
||||
public suspend fun <T : Any> Data<T>.awaitWithMeta(): ValueWithMeta<T> = ValueWithMeta(meta, await())
|
||||
|
||||
public data class NamedValueWithMeta<T>(val name: Name, val meta: Meta, val value: T)
|
||||
|
||||
public suspend fun <T : Any> NamedData<T>.awaitWithMeta(): NamedValueWithMeta<T> =
|
||||
NamedValueWithMeta(name, meta, await())
|
||||
|
||||
|
||||
/**
|
||||
* Lazily transform this data to another data. By convention [block] should not use external data (be pure).
|
||||
* @param coroutineContext additional [CoroutineContext] elements used for data computation.
|
||||
* @param meta meta for the resulting data. By default, it equals the meta of the input data.
|
||||
* @param block the transformation itself
|
||||
*/
|
||||
public inline fun <T : Any, reified R : Any> Data<T>.map(
|
||||
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||
meta: Meta = this.meta,
|
||||
crossinline block: suspend (T) -> R,
|
||||
): Data<R> = Data(meta, coroutineContext, listOf(this)) {
|
||||
block(await())
|
||||
}
|
||||
|
||||
/**
|
||||
* Combine this data with the other data using [block]. See [Data::map] for other details
|
||||
*/
|
||||
public inline fun <T1 : Any, T2 : Any, reified R : Any> Data<T1>.combine(
|
||||
other: Data<T2>,
|
||||
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||
meta: Meta = this.meta,
|
||||
crossinline block: suspend (left: T1, right: T2) -> R,
|
||||
): Data<R> = Data(meta, coroutineContext, listOf(this, other)) {
|
||||
block(await(), other.await())
|
||||
}
|
||||
|
||||
|
||||
//data collection operations
|
||||
|
||||
/**
|
||||
* Lazily reduce a collection of [Data] to a single data.
|
||||
*/
|
||||
public inline fun <T : Any, reified R : Any> Collection<Data<T>>.reduceToData(
|
||||
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||
meta: Meta = Meta.EMPTY,
|
||||
crossinline block: suspend (List<ValueWithMeta<T>>) -> R,
|
||||
): Data<R> = Data(
|
||||
meta,
|
||||
coroutineContext,
|
||||
this
|
||||
) {
|
||||
block(map { it.awaitWithMeta() })
|
||||
}
|
||||
|
||||
@DFInternal
|
||||
public fun <K, T : Any, R : Any> Map<K, Data<T>>.reduceToData(
|
||||
outputType: KType,
|
||||
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||
meta: Meta = Meta.EMPTY,
|
||||
block: suspend (Map<K, ValueWithMeta<T>>) -> R,
|
||||
): Data<R> = Data(
|
||||
outputType,
|
||||
meta,
|
||||
coroutineContext,
|
||||
this.values
|
||||
) {
|
||||
block(mapValues { it.value.awaitWithMeta() })
|
||||
}
|
||||
|
||||
/**
|
||||
* Lazily reduce a [Map] of [Data] with any static key.
|
||||
* @param K type of the map key
|
||||
* @param T type of the input goal
|
||||
* @param R type of the result goal
|
||||
*/
|
||||
public inline fun <K, T : Any, reified R : Any> Map<K, Data<T>>.reduceToData(
|
||||
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||
meta: Meta = Meta.EMPTY,
|
||||
crossinline block: suspend (Map<K, ValueWithMeta<T>>) -> R,
|
||||
): Data<R> = Data(
|
||||
meta,
|
||||
coroutineContext,
|
||||
this.values
|
||||
) {
|
||||
block(mapValues { it.value.awaitWithMeta() })
|
||||
}
|
||||
|
||||
//Iterable operations
|
||||
|
||||
@DFInternal
|
||||
public inline fun <T : Any, R : Any> Iterable<Data<T>>.reduceToData(
|
||||
outputType: KType,
|
||||
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||
meta: Meta = Meta.EMPTY,
|
||||
crossinline transformation: suspend (Collection<ValueWithMeta<T>>) -> R,
|
||||
): Data<R> = Data(
|
||||
outputType,
|
||||
meta,
|
||||
coroutineContext,
|
||||
toList()
|
||||
) {
|
||||
transformation(map { it.awaitWithMeta() })
|
||||
}
|
||||
|
||||
@OptIn(DFInternal::class)
|
||||
public inline fun <T : Any, reified R : Any> Iterable<Data<T>>.reduceToData(
|
||||
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||
meta: Meta = Meta.EMPTY,
|
||||
crossinline transformation: suspend (Collection<ValueWithMeta<T>>) -> R,
|
||||
): Data<R> = reduceToData(typeOf<R>(), coroutineContext, meta) {
|
||||
transformation(it)
|
||||
}
|
||||
|
||||
public inline fun <T : Any, reified R : Any> Iterable<Data<T>>.foldToData(
|
||||
initial: R,
|
||||
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||
meta: Meta = Meta.EMPTY,
|
||||
crossinline block: suspend (result: R, data: ValueWithMeta<T>) -> R,
|
||||
): Data<R> = reduceToData(
|
||||
coroutineContext, meta
|
||||
) {
|
||||
it.fold(initial) { acc, t -> block(acc, t) }
|
||||
}
|
||||
|
||||
/**
|
||||
* Transform an [Iterable] of [NamedData] to a single [Data].
|
||||
*/
|
||||
@DFInternal
|
||||
public inline fun <T : Any, R : Any> Iterable<NamedData<T>>.reduceNamedToData(
|
||||
outputType: KType,
|
||||
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||
meta: Meta = Meta.EMPTY,
|
||||
crossinline transformation: suspend (Collection<NamedValueWithMeta<T>>) -> R,
|
||||
): Data<R> = Data(
|
||||
outputType,
|
||||
meta,
|
||||
coroutineContext,
|
||||
toList()
|
||||
) {
|
||||
transformation(map { it.awaitWithMeta() })
|
||||
}
|
||||
|
||||
@OptIn(DFInternal::class)
|
||||
public inline fun <T : Any, reified R : Any> Iterable<NamedData<T>>.reduceNamedToData(
|
||||
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||
meta: Meta = Meta.EMPTY,
|
||||
crossinline transformation: suspend (Collection<NamedValueWithMeta<T>>) -> R,
|
||||
): Data<R> = reduceNamedToData(typeOf<R>(), coroutineContext, meta) {
|
||||
transformation(it)
|
||||
}
|
||||
|
||||
/**
|
||||
* Fold an [Iterable] of named data into a single [Data]
|
||||
*/
|
||||
public inline fun <T : Any, reified R : Any> Iterable<NamedData<T>>.foldNamedToData(
|
||||
initial: R,
|
||||
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||
meta: Meta = Meta.EMPTY,
|
||||
crossinline block: suspend (result: R, data: NamedValueWithMeta<T>) -> R,
|
||||
): Data<R> = reduceNamedToData(
|
||||
coroutineContext, meta
|
||||
) {
|
||||
it.fold(initial) { acc, t -> block(acc, t) }
|
||||
}
|
||||
|
||||
//DataSet operations
|
||||
|
||||
@DFInternal
|
||||
public suspend fun <T : Any, R : Any> DataSet<T>.map(
|
||||
outputType: KType,
|
||||
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||
metaTransform: MutableMeta.() -> Unit = {},
|
||||
block: suspend (NamedValueWithMeta<T>) -> R,
|
||||
): DataTree<R> = DataTree<R>(outputType) {
|
||||
forEach {
|
||||
val newMeta = it.meta.toMutableMeta().apply(metaTransform).seal()
|
||||
val d = Data(outputType, newMeta, coroutineContext, listOf(it)) {
|
||||
block(it.awaitWithMeta())
|
||||
}
|
||||
data(it.name, d)
|
||||
}
|
||||
}
|
||||
|
||||
@OptIn(DFInternal::class)
|
||||
public suspend inline fun <T : Any, reified R : Any> DataSet<T>.map(
|
||||
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||
noinline metaTransform: MutableMeta.() -> Unit = {},
|
||||
noinline block: suspend (NamedValueWithMeta<T>) -> R,
|
||||
): DataTree<R> = map(typeOf<R>(), coroutineContext, metaTransform, block)
|
||||
|
||||
public inline fun <T : Any> DataSet<T>.forEach(block: (NamedData<T>) -> Unit) {
|
||||
for (d in this) {
|
||||
block(d)
|
||||
}
|
||||
}
|
||||
|
||||
public inline fun <T : Any, reified R : Any> DataSet<T>.reduceToData(
|
||||
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||
meta: Meta = Meta.EMPTY,
|
||||
crossinline transformation: suspend (Iterable<NamedValueWithMeta<T>>) -> R,
|
||||
): Data<R> = asIterable().reduceNamedToData(coroutineContext, meta, transformation)
|
||||
|
||||
public inline fun <T : Any, reified R : Any> DataSet<T>.foldToData(
|
||||
initial: R,
|
||||
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||
meta: Meta = Meta.EMPTY,
|
||||
crossinline block: suspend (result: R, data: NamedValueWithMeta<T>) -> R,
|
||||
): Data<R> = asIterable().foldNamedToData(initial, coroutineContext, meta, block)
|
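// Illustrative sketch (not part of the changeset): element-wise mapping and reduction of a whole
// DataSet; both results stay lazy until awaited.
suspend fun statistics(numbers: DataSet<Int>): Pair<DataTree<Int>, Data<Int>> {
    val doubled: DataTree<Int> = numbers.map { it.value * 2 }
    val sum: Data<Int> = numbers.foldToData(0) { acc, item -> acc + item.value }
    return doubled to sum
}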
@ -1,32 +0,0 @@
|
||||
package hep.dataforge.data
|
||||
|
||||
import kotlin.test.Test
|
||||
import kotlin.test.assertTrue
|
||||
|
||||
|
||||
internal class DataTreeBuilderTest{
|
||||
@Test
|
||||
fun testDataUpdate(){
|
||||
val updateData = DataNode<Any>{
|
||||
"update" put {
|
||||
"a" put Data.static("a")
|
||||
"b" put Data.static("b")
|
||||
}
|
||||
}
|
||||
|
||||
val node = DataNode<Any>{
|
||||
node("primary"){
|
||||
static("a","a")
|
||||
static("b","b")
|
||||
}
|
||||
static("root","root")
|
||||
update(updateData)
|
||||
}
|
||||
|
||||
println(node.toMeta())
|
||||
|
||||
assertTrue { node["update.a"] != null }
|
||||
assertTrue { node["primary.a"] != null }
|
||||
|
||||
}
|
||||
}
|
@ -1,16 +0,0 @@
|
||||
package hep.dataforge.data
|
||||
|
||||
import kotlin.reflect.KClass
|
||||
|
||||
/**
|
||||
* Check that node is compatible with given type meaning that each element could be cast to the type
|
||||
*/
|
||||
actual fun <R : Any> DataNode<*>.canCast(type: KClass<out R>): Boolean {
|
||||
//Not supported in js yet
|
||||
return true
|
||||
}
|
||||
|
||||
actual fun <R : Any> Data<*>.canCast(type: KClass<out R>): Boolean {
|
||||
//Not supported in js yet
|
||||
return true
|
||||
}
|
@ -1,27 +0,0 @@
|
||||
package hep.dataforge.data
|
||||
|
||||
import hep.dataforge.meta.Meta
|
||||
import hep.dataforge.names.NameToken
|
||||
import kotlin.reflect.KClass
|
||||
|
||||
|
||||
/**
|
||||
* A zero-copy data node wrapper that returns only children with appropriate type.
|
||||
*/
|
||||
class TypeFilteredDataNode<out T : Any>(val origin: DataNode<*>, override val type: KClass<out T>) : DataNode<T> {
|
||||
override val meta: Meta get() = origin.meta
|
||||
override val items: Map<NameToken, DataItem<T>> by lazy {
|
||||
origin.items.mapNotNull { (key, item) ->
|
||||
when (item) {
|
||||
is DataItem.Leaf -> {
|
||||
(item.data.filterIsInstance(type))?.let {
|
||||
key to DataItem.Leaf(it)
|
||||
}
|
||||
}
|
||||
is DataItem.Node -> {
|
||||
key to DataItem.Node(item.node.filterIsInstance(type))
|
||||
}
|
||||
}
|
||||
}.associate { it }
|
||||
}
|
||||
}
|
@ -1,49 +0,0 @@
|
||||
package hep.dataforge.data
|
||||
|
||||
import kotlinx.coroutines.runBlocking
|
||||
import kotlin.reflect.KClass
|
||||
import kotlin.reflect.full.isSubclassOf
|
||||
import kotlin.reflect.full.isSuperclassOf
|
||||
|
||||
/**
|
||||
* Block the thread and get data content
|
||||
*/
|
||||
fun <T : Any> Data<T>.get(): T = runBlocking { await() }
|
||||
|
||||
/**
|
||||
* Check that node is compatible with given type meaning that each element could be cast to the type
|
||||
*/
|
||||
actual fun <R : Any> DataNode<*>.canCast(type: KClass<out R>): Boolean =
|
||||
type.isSuperclassOf(type)
|
||||
|
||||
actual fun <R : Any> Data<*>.canCast(type: KClass<out R>): Boolean =
|
||||
this.type.isSubclassOf(type)
|
||||
|
||||
/**
|
||||
* Cast the node to given type if the cast is possible or return null
|
||||
*/
|
||||
fun <R : Any> Data<*>.filterIsInstance(type: KClass<out R>): Data<R>? =
|
||||
if (canCast(type)) cast(type) else null
|
||||
|
||||
/**
|
||||
* Filter a node by data and node type. Resulting node and its subnodes is guaranteed to have border type [type],
|
||||
* but could contain empty nodes
|
||||
*/
|
||||
fun <R : Any> DataNode<*>.filterIsInstance(type: KClass<out R>): DataNode<R> {
|
||||
return when {
|
||||
canCast(type) -> cast(type)
|
||||
this is TypeFilteredDataNode -> origin.filterIsInstance(type)
|
||||
else -> TypeFilteredDataNode(this, type)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Filter all elements of given data item that could be cast to given type. If no elements are available, return null.
|
||||
*/
|
||||
fun <R : Any> DataItem<*>?.filterIsInstance(type: KClass<out R>): DataItem<R>? = when (this) {
|
||||
null -> null
|
||||
is DataItem.Node -> DataItem.Node(this.node.filterIsInstance(type))
|
||||
is DataItem.Leaf -> this.data.filterIsInstance(type)?.let { DataItem.Leaf(it) }
|
||||
}
|
||||
|
||||
inline fun <reified R : Any> DataItem<*>?.filterIsInstance(): DataItem<R>? = this@filterIsInstance.filterIsInstance(R::class)
|
@@ -0,0 +1,2 @@
package space.kscience.dataforge.data
@ -0,0 +1,85 @@
|
||||
package space.kscience.dataforge.data
|
||||
|
||||
import kotlinx.coroutines.flow.Flow
|
||||
import kotlinx.coroutines.flow.filter
|
||||
import space.kscience.dataforge.meta.Meta
|
||||
import space.kscience.dataforge.misc.DFExperimental
|
||||
import space.kscience.dataforge.names.Name
|
||||
import kotlin.coroutines.CoroutineContext
|
||||
import kotlin.coroutines.EmptyCoroutineContext
|
||||
import kotlin.reflect.KType
|
||||
import kotlin.reflect.full.isSubtypeOf
|
||||
import kotlin.reflect.typeOf
|
||||
|
||||
|
||||
/**
|
||||
* Cast the node to given type if the cast is possible or return null
|
||||
*/
|
||||
@Suppress("UNCHECKED_CAST")
|
||||
private fun <R : Any> Data<*>.castOrNull(type: KType): Data<R>? =
|
||||
if (!this.type.isSubtypeOf(type)) {
|
||||
null
|
||||
} else {
|
||||
object : Data<R> by (this as Data<R>) {
|
||||
override val type: KType = type
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Select all data matching given type and filters. Does not modify paths
|
||||
*
|
||||
* @param predicate addition filtering condition based on item name and meta. By default, accepts all
|
||||
*/
|
||||
@OptIn(DFExperimental::class)
|
||||
public fun <R : Any> DataSet<*>.filterByType(
|
||||
type: KType,
|
||||
predicate: (name: Name, meta: Meta) -> Boolean = { _, _ -> true },
|
||||
): DataSource<R> = object : DataSource<R> {
|
||||
override val dataType = type
|
||||
|
||||
override val coroutineContext: CoroutineContext
|
||||
get() = (this@filterByType as? DataSource)?.coroutineContext ?: EmptyCoroutineContext
|
||||
|
||||
override val meta: Meta get() = this@filterByType.meta
|
||||
|
||||
private fun checkDatum(name: Name, datum: Data<*>): Boolean = datum.type.isSubtypeOf(type)
|
||||
&& predicate(name, datum.meta)
|
||||
|
||||
override fun iterator(): Iterator<NamedData<R>> = iterator {
|
||||
for(d in this@filterByType){
|
||||
if(checkDatum(d.name,d.data)){
|
||||
@Suppress("UNCHECKED_CAST")
|
||||
yield(d as NamedData<R>)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
override fun get(name: Name): Data<R>? = this@filterByType[name]?.let { datum ->
|
||||
if (checkDatum(name, datum)) datum.castOrNull(type) else null
|
||||
}
|
||||
|
||||
override val updates: Flow<Name> = this@filterByType.updates.filter { name ->
|
||||
get(name)?.let { datum ->
|
||||
checkDatum(name, datum)
|
||||
} ?: false
|
||||
}
|
||||
}
|
||||
|
||||
/**
* Select all data matching the given reified type and an optional name/meta [predicate]. Does not modify paths.
*/
|
||||
public inline fun <reified R : Any> DataSet<*>.filterByType(
|
||||
noinline predicate: (name: Name, meta: Meta) -> Boolean = { _, _ -> true },
|
||||
): DataSet<R> = filterByType(typeOf<R>(), predicate)
|
||||
|
||||
/**
|
||||
* Select a single datum if it is present and of given [type]
|
||||
*/
|
||||
public fun <R : Any> DataSet<*>.getByType(type: KType, name: Name): NamedData<R>? =
|
||||
get(name)?.castOrNull<R>(type)?.named(name)
|
||||
|
||||
public inline fun <reified R : Any> DataSet<*>.getByType(name: Name): NamedData<R>? =
|
||||
this@getByType.getByType(typeOf<R>(), name)
|
||||
|
||||
public inline fun <reified R : Any> DataSet<*>.getByType(name: String): NamedData<R>? =
|
||||
this@getByType.getByType(typeOf<R>(), Name.parse(name))
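A minimal usage sketch of the extensions above, assuming a small mixed-type tree (all names and values are invented; the builder and `await` calls follow the tests later in this diff):

```kotlin
import kotlinx.coroutines.runBlocking
import space.kscience.dataforge.data.*

fun main() = runBlocking {
    // Hypothetical mixed-type tree built with the DataTree builder shown in the tests below.
    val tree: DataTree<Any> = DataTree {
        static("int", 42)
        static("text", "hello")
    }

    // Keep only Int data; names and meta are preserved, non-matching items are skipped.
    val ints: DataSet<Int> = tree.filterByType()

    // Fetch a single datum only if it exists and carries the requested type.
    println(tree.getByType<Int>("int")?.await()) // 42
    println(tree.getByType<Int>("text"))         // null: wrong type
}
```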
|
@ -0,0 +1,40 @@
|
||||
package space.kscience.dataforge.data
|
||||
|
||||
import kotlinx.coroutines.CoroutineScope
|
||||
import kotlinx.coroutines.Job
|
||||
import kotlinx.coroutines.launch
|
||||
import space.kscience.dataforge.names.Name
|
||||
import space.kscience.dataforge.names.plus
|
||||
|
||||
|
||||
/**
|
||||
* Append data to node
|
||||
*/
|
||||
context(DataSetBuilder<T>) public infix fun <T : Any> String.put(data: Data<T>): Unit =
|
||||
data(Name.parse(this), data)
|
||||
|
||||
/**
|
||||
* Append node
|
||||
*/
|
||||
context(DataSetBuilder<T>) public infix fun <T : Any> String.put(dataSet: DataSet<T>): Unit =
|
||||
node(Name.parse(this), dataSet)
|
||||
|
||||
/**
|
||||
* Build and append node
|
||||
*/
|
||||
context(DataSetBuilder<T>) public infix fun <T : Any> String.put(
|
||||
block: DataSetBuilder<T>.() -> Unit,
|
||||
): Unit = node(Name.parse(this), block)
|
||||
|
||||
/**
|
||||
* Copy the given data set and mirror its changes into this [DataSetBuilder] under [name]. Returns an update [Job].
|
||||
*/
|
||||
context(DataSetBuilder<T>) public fun <T : Any> CoroutineScope.setAndWatch(
|
||||
name: Name,
|
||||
dataSet: DataSet<T>,
|
||||
): Job = launch {
|
||||
node(name, dataSet)
|
||||
dataSet.updates.collect { nameInBranch ->
|
||||
data(name + nameInBranch, dataSet.get(nameInBranch))
|
||||
}
|
||||
}
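A short sketch of the `put` helpers above (hypothetical names; the pattern mirrors `DataTreeBuilderTest` further down in this diff). Since `put` requires a `DataSetBuilder` context, it is called inside a tree or source builder block:

```kotlin
import kotlinx.coroutines.runBlocking
import space.kscience.dataforge.data.*

// A hedged example, not part of the change set.
fun exampleTree(): DataTree<String> = runBlocking {
    DataTree<String> {
        // Build and append a sub-node in place.
        "folder" put {
            static("a", "a")
            static("b", "b")
        }
        // Append a single datum under an explicit name.
        "single" put Data.static("c")
    }
}
```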
|
@ -0,0 +1,50 @@
|
||||
package space.kscience.dataforge.data
|
||||
|
||||
import kotlinx.coroutines.ExperimentalCoroutinesApi
|
||||
import kotlinx.coroutines.delay
|
||||
import kotlinx.coroutines.test.runTest
|
||||
import org.junit.jupiter.api.Test
|
||||
import space.kscience.dataforge.actions.Action
|
||||
import space.kscience.dataforge.actions.invoke
|
||||
import space.kscience.dataforge.actions.map
|
||||
import space.kscience.dataforge.misc.DFExperimental
|
||||
import kotlin.test.assertEquals
|
||||
|
||||
@OptIn(DFExperimental::class, ExperimentalCoroutinesApi::class)
|
||||
internal class ActionsTest {
|
||||
@Test
|
||||
fun testStaticMapAction() = runTest {
|
||||
val data: DataTree<Int> = DataTree {
|
||||
repeat(10) {
|
||||
static(it.toString(), it)
|
||||
}
|
||||
}
|
||||
|
||||
val plusOne = Action.map<Int, Int> {
|
||||
result { it + 1 }
|
||||
}
|
||||
val result = plusOne(data)
|
||||
assertEquals(2, result["1"]?.await())
|
||||
}
|
||||
|
||||
@Test
|
||||
fun testDynamicMapAction() = runTest {
|
||||
val data: DataSourceBuilder<Int> = DataSource()
|
||||
|
||||
val plusOne = Action.map<Int, Int> {
|
||||
result { it + 1 }
|
||||
}
|
||||
|
||||
val result = plusOne(data)
|
||||
|
||||
repeat(10) {
|
||||
data.static(it.toString(), it)
|
||||
}
|
||||
|
||||
delay(20)
|
||||
|
||||
assertEquals(2, result["1"]?.await())
|
||||
data.close()
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,91 @@
|
||||
package space.kscience.dataforge.data
|
||||
|
||||
import kotlinx.coroutines.*
|
||||
import space.kscience.dataforge.misc.DFExperimental
|
||||
import space.kscience.dataforge.names.asName
|
||||
import kotlin.test.Test
|
||||
import kotlin.test.assertEquals
|
||||
|
||||
|
||||
internal class DataTreeBuilderTest {
|
||||
@Test
|
||||
fun testTreeBuild() = runBlocking {
|
||||
val node = DataTree<Any> {
|
||||
"primary" put {
|
||||
static("a", "a")
|
||||
static("b", "b")
|
||||
}
|
||||
static("c.d", "c.d")
|
||||
static("c.f", "c.f")
|
||||
}
|
||||
runBlocking {
|
||||
assertEquals("a", node["primary.a"]?.await())
|
||||
assertEquals("b", node["primary.b"]?.await())
|
||||
assertEquals("c.d", node["c.d"]?.await())
|
||||
assertEquals("c.f", node["c.f"]?.await())
|
||||
}
|
||||
}
|
||||
|
||||
@OptIn(DFExperimental::class)
|
||||
@Test
|
||||
fun testDataUpdate() = runBlocking {
|
||||
val updateData: DataTree<Any> = DataTree {
|
||||
"update" put {
|
||||
"a" put Data.static("a")
|
||||
"b" put Data.static("b")
|
||||
}
|
||||
}
|
||||
|
||||
val node = DataTree<Any> {
|
||||
"primary" put {
|
||||
static("a", "a")
|
||||
static("b", "b")
|
||||
}
|
||||
static("root", "root")
|
||||
populateFrom(updateData)
|
||||
}
|
||||
|
||||
runBlocking {
|
||||
assertEquals("a", node["update.a"]?.await())
|
||||
assertEquals("a", node["primary.a"]?.await())
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
fun testDynamicUpdates() = runBlocking {
|
||||
try {
|
||||
lateinit var updateJob: Job
|
||||
supervisorScope {
|
||||
val subNode = DataSource<Int> {
|
||||
updateJob = launch {
|
||||
repeat(10) {
|
||||
delay(10)
|
||||
static("value", it)
|
||||
}
|
||||
delay(10)
|
||||
}
|
||||
}
|
||||
launch {
|
||||
subNode.updatesWithData.collect {
|
||||
println(it)
|
||||
}
|
||||
}
|
||||
val rootNode = DataSource<Int> {
|
||||
setAndWatch("sub".asName(), subNode)
|
||||
}
|
||||
|
||||
launch {
|
||||
rootNode.updatesWithData.collect {
|
||||
println(it)
|
||||
}
|
||||
}
|
||||
updateJob.join()
|
||||
assertEquals(9, rootNode["sub.value"]?.await())
|
||||
cancel()
|
||||
}
|
||||
} catch (t: Throwable) {
|
||||
if (t !is CancellationException) throw t
|
||||
}
|
||||
|
||||
}
|
||||
}
|
23
dataforge-io/README.md
Normal file
@ -0,0 +1,23 @@
|
||||
# Module dataforge-io
|
||||
|
||||
IO module
|
||||
|
||||
## Usage
|
||||
|
||||
## Artifact:
|
||||
|
||||
The Maven coordinates of this project are `space.kscience:dataforge-io:0.7.0`.
|
||||
|
||||
**Gradle Kotlin DSL:**
|
||||
```kotlin
|
||||
repositories {
|
||||
maven("https://repo.kotlin.link")
|
||||
//uncomment to access development builds
|
||||
//maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
|
||||
mavenCentral()
|
||||
}
|
||||
|
||||
dependencies {
|
||||
implementation("space.kscience:dataforge-io:0.7.0")
|
||||
}
|
||||
```
|
473
dataforge-io/api/dataforge-io.api
Normal file
@ -0,0 +1,473 @@
|
||||
public final class hep/dataforge/io/BinaryMetaFormat : hep/dataforge/io/MetaFormat, hep/dataforge/io/MetaFormatFactory {
|
||||
public static final field INSTANCE Lhep/dataforge/io/BinaryMetaFormat;
|
||||
public fun getKey ()S
|
||||
public fun getName ()Lhep/dataforge/names/Name;
|
||||
public fun getShortName ()Ljava/lang/String;
|
||||
public fun getType ()Lkotlin/reflect/KClass;
|
||||
public fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Lhep/dataforge/io/MetaFormat;
|
||||
public synthetic fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Ljava/lang/Object;
|
||||
public fun readMeta (Lkotlinx/io/Input;Lhep/dataforge/meta/descriptors/NodeDescriptor;)Lhep/dataforge/meta/Meta;
|
||||
public final fun readMetaItem (Lkotlinx/io/Input;)Lhep/dataforge/meta/TypedMetaItem;
|
||||
public fun readObject (Lkotlinx/io/Input;)Lhep/dataforge/meta/Meta;
|
||||
public synthetic fun readObject (Lkotlinx/io/Input;)Ljava/lang/Object;
|
||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||
public fun writeMeta (Lkotlinx/io/Output;Lhep/dataforge/meta/Meta;Lhep/dataforge/meta/descriptors/NodeDescriptor;)V
|
||||
public fun writeObject (Lkotlinx/io/Output;Lhep/dataforge/meta/Meta;)V
|
||||
public synthetic fun writeObject (Lkotlinx/io/Output;Ljava/lang/Object;)V
|
||||
public final fun writeValue (Lkotlinx/io/Output;Lhep/dataforge/values/Value;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/BinaryView : kotlinx/io/Binary {
|
||||
public fun <init> (Lkotlinx/io/Binary;II)V
|
||||
public fun getSize ()I
|
||||
public fun read (IILkotlin/jvm/functions/Function1;)Ljava/lang/Object;
|
||||
}
|
||||
|
||||
public abstract interface class hep/dataforge/io/Consumer {
|
||||
public abstract fun consume (Lhep/dataforge/io/Envelope;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/DoubleIOFormat : hep/dataforge/io/IOFormat, hep/dataforge/io/IOFormatFactory {
|
||||
public static final field INSTANCE Lhep/dataforge/io/DoubleIOFormat;
|
||||
public fun getName ()Lhep/dataforge/names/Name;
|
||||
public fun getType ()Lkotlin/reflect/KClass;
|
||||
public fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Lhep/dataforge/io/IOFormat;
|
||||
public synthetic fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Ljava/lang/Object;
|
||||
public fun readObject (Lkotlinx/io/Input;)Ljava/lang/Double;
|
||||
public synthetic fun readObject (Lkotlinx/io/Input;)Ljava/lang/Object;
|
||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||
public fun writeObject (Lkotlinx/io/Output;D)V
|
||||
public synthetic fun writeObject (Lkotlinx/io/Output;Ljava/lang/Object;)V
|
||||
}
|
||||
|
||||
public abstract interface class hep/dataforge/io/Envelope {
|
||||
public static final field Companion Lhep/dataforge/io/Envelope$Companion;
|
||||
public abstract fun getData ()Lkotlinx/io/Binary;
|
||||
public abstract fun getMeta ()Lhep/dataforge/meta/Meta;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/Envelope$Companion {
|
||||
public final fun getENVELOPE_DATA_ID_KEY ()Lhep/dataforge/names/Name;
|
||||
public final fun getENVELOPE_DATA_TYPE_KEY ()Lhep/dataforge/names/Name;
|
||||
public final fun getENVELOPE_DESCRIPTION_KEY ()Lhep/dataforge/names/Name;
|
||||
public final fun getENVELOPE_NAME_KEY ()Lhep/dataforge/names/Name;
|
||||
public final fun getENVELOPE_NODE_KEY ()Lhep/dataforge/names/Name;
|
||||
public final fun getENVELOPE_TYPE_KEY ()Lhep/dataforge/names/Name;
|
||||
public final fun invoke (Lkotlin/jvm/functions/Function1;)Lhep/dataforge/io/Envelope;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/EnvelopeBuilder : hep/dataforge/io/Envelope {
|
||||
public fun <init> ()V
|
||||
public final fun data (Lkotlin/jvm/functions/Function1;)V
|
||||
public fun getData ()Lkotlinx/io/Binary;
|
||||
public final fun getDataID ()Ljava/lang/String;
|
||||
public final fun getDataType ()Ljava/lang/String;
|
||||
public final fun getDescription ()Ljava/lang/String;
|
||||
public fun getMeta ()Lhep/dataforge/meta/Meta;
|
||||
public final fun getName ()Ljava/lang/String;
|
||||
public final fun getType ()Ljava/lang/String;
|
||||
public final fun meta (Lkotlin/jvm/functions/Function1;)V
|
||||
public final fun seal ()Lhep/dataforge/io/Envelope;
|
||||
public fun setData (Lkotlinx/io/Binary;)V
|
||||
public final fun setDataID (Ljava/lang/String;)V
|
||||
public final fun setDataType (Ljava/lang/String;)V
|
||||
public final fun setDescription (Ljava/lang/String;)V
|
||||
public fun setMeta (Lhep/dataforge/meta/Meta;)V
|
||||
public final fun setName (Ljava/lang/String;)V
|
||||
public final fun setType (Ljava/lang/String;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/EnvelopeBuilderKt {
|
||||
public static final fun Envelope (Lkotlin/jvm/functions/Function1;)Lhep/dataforge/io/Envelope;
|
||||
}
|
||||
|
||||
public abstract interface class hep/dataforge/io/EnvelopeFormat : hep/dataforge/io/IOFormat {
|
||||
public abstract fun getDefaultMetaFormat ()Lhep/dataforge/io/MetaFormatFactory;
|
||||
public abstract fun readObject (Lkotlinx/io/Input;)Lhep/dataforge/io/Envelope;
|
||||
public abstract fun readPartial (Lkotlinx/io/Input;)Lhep/dataforge/io/PartialEnvelope;
|
||||
public abstract fun writeEnvelope (Lkotlinx/io/Output;Lhep/dataforge/io/Envelope;Lhep/dataforge/io/MetaFormatFactory;Lhep/dataforge/meta/Meta;)V
|
||||
public abstract fun writeObject (Lkotlinx/io/Output;Lhep/dataforge/io/Envelope;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/EnvelopeFormat$DefaultImpls {
|
||||
public static fun getDefaultMetaFormat (Lhep/dataforge/io/EnvelopeFormat;)Lhep/dataforge/io/MetaFormatFactory;
|
||||
public static synthetic fun writeEnvelope$default (Lhep/dataforge/io/EnvelopeFormat;Lkotlinx/io/Output;Lhep/dataforge/io/Envelope;Lhep/dataforge/io/MetaFormatFactory;Lhep/dataforge/meta/Meta;ILjava/lang/Object;)V
|
||||
public static fun writeObject (Lhep/dataforge/io/EnvelopeFormat;Lkotlinx/io/Output;Lhep/dataforge/io/Envelope;)V
|
||||
}
|
||||
|
||||
public abstract interface class hep/dataforge/io/EnvelopeFormatFactory : hep/dataforge/io/EnvelopeFormat, hep/dataforge/io/IOFormatFactory {
|
||||
public static final field Companion Lhep/dataforge/io/EnvelopeFormatFactory$Companion;
|
||||
public static final field ENVELOPE_FORMAT_TYPE Ljava/lang/String;
|
||||
public abstract fun getName ()Lhep/dataforge/names/Name;
|
||||
public abstract fun getType ()Lkotlin/reflect/KClass;
|
||||
public abstract fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Lhep/dataforge/io/EnvelopeFormat;
|
||||
public abstract fun peekFormat (Lhep/dataforge/io/IOPlugin;Lkotlinx/io/Input;)Lhep/dataforge/io/EnvelopeFormat;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/EnvelopeFormatFactory$Companion {
|
||||
public static final field ENVELOPE_FORMAT_TYPE Ljava/lang/String;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/EnvelopeFormatFactory$DefaultImpls {
|
||||
public static fun getDefaultMetaFormat (Lhep/dataforge/io/EnvelopeFormatFactory;)Lhep/dataforge/io/MetaFormatFactory;
|
||||
public static fun getName (Lhep/dataforge/io/EnvelopeFormatFactory;)Lhep/dataforge/names/Name;
|
||||
public static fun getType (Lhep/dataforge/io/EnvelopeFormatFactory;)Lkotlin/reflect/KClass;
|
||||
public static fun toMeta (Lhep/dataforge/io/EnvelopeFormatFactory;)Lhep/dataforge/meta/Meta;
|
||||
public static fun writeObject (Lhep/dataforge/io/EnvelopeFormatFactory;Lkotlinx/io/Output;Lhep/dataforge/io/Envelope;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/EnvelopeFormatKt {
|
||||
public static final fun read (Lhep/dataforge/io/EnvelopeFormat;Lkotlinx/io/Input;)Lhep/dataforge/io/Envelope;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/EnvelopeKt {
|
||||
public static final fun contentEquals (Lhep/dataforge/io/Envelope;Lhep/dataforge/io/Envelope;)Z
|
||||
public static final fun dataEquals (Lhep/dataforge/io/Envelope;Lhep/dataforge/io/Envelope;)Z
|
||||
public static final fun getDataID (Lhep/dataforge/io/Envelope;)Ljava/lang/String;
|
||||
public static final fun getDataType (Lhep/dataforge/io/Envelope;)Ljava/lang/String;
|
||||
public static final fun getDescription (Lhep/dataforge/io/Envelope;)Ljava/lang/String;
|
||||
public static final fun getType (Lhep/dataforge/io/Envelope;)Ljava/lang/String;
|
||||
public static final fun metaEquals (Lhep/dataforge/io/Envelope;Lhep/dataforge/io/Envelope;)Z
|
||||
public static final fun withMetaLayers (Lhep/dataforge/io/Envelope;[Lhep/dataforge/meta/Meta;)Lhep/dataforge/io/Envelope;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/EnvelopePart {
|
||||
public fun <init> (Lkotlinx/io/Binary;Lhep/dataforge/meta/Meta;)V
|
||||
public final fun component1 ()Lkotlinx/io/Binary;
|
||||
public final fun component2 ()Lhep/dataforge/meta/Meta;
|
||||
public final fun copy (Lkotlinx/io/Binary;Lhep/dataforge/meta/Meta;)Lhep/dataforge/io/EnvelopePart;
|
||||
public static synthetic fun copy$default (Lhep/dataforge/io/EnvelopePart;Lkotlinx/io/Binary;Lhep/dataforge/meta/Meta;ILjava/lang/Object;)Lhep/dataforge/io/EnvelopePart;
|
||||
public fun equals (Ljava/lang/Object;)Z
|
||||
public final fun getBinary ()Lkotlinx/io/Binary;
|
||||
public final fun getDescription ()Lhep/dataforge/meta/Meta;
|
||||
public fun hashCode ()I
|
||||
public fun toString ()Ljava/lang/String;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/EnvelopePartsKt {
|
||||
public static final fun envelope (Lhep/dataforge/io/EnvelopePart;Lhep/dataforge/io/EnvelopeFormat;)Lhep/dataforge/io/Envelope;
|
||||
public static final fun envelope (Lhep/dataforge/io/EnvelopePart;Lhep/dataforge/io/IOPlugin;)Lhep/dataforge/io/Envelope;
|
||||
public static final fun envelopes (Lhep/dataforge/io/EnvelopeBuilder;Ljava/util/List;Lhep/dataforge/io/EnvelopeFormat;Ljava/lang/String;)V
|
||||
public static synthetic fun envelopes$default (Lhep/dataforge/io/EnvelopeBuilder;Ljava/util/List;Lhep/dataforge/io/EnvelopeFormat;Ljava/lang/String;ILjava/lang/Object;)V
|
||||
public static final fun getName (Lhep/dataforge/io/EnvelopePart;)Ljava/lang/String;
|
||||
public static final fun multipart (Lhep/dataforge/io/EnvelopeBuilder;Ljava/util/List;Ljava/lang/String;)V
|
||||
public static synthetic fun multipart$default (Lhep/dataforge/io/EnvelopeBuilder;Ljava/util/List;Ljava/lang/String;ILjava/lang/Object;)V
|
||||
public static final fun parts (Lhep/dataforge/io/Envelope;)Ljava/util/List;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/FileIOKt {
|
||||
public static final fun append (Ljava/nio/file/Path;Lkotlin/jvm/functions/Function1;)V
|
||||
public static final fun getDATA_FILE_NAME (Lhep/dataforge/io/IOPlugin$Companion;)Ljava/lang/String;
|
||||
public static final fun getMETA_FILE_NAME (Lhep/dataforge/io/IOPlugin$Companion;)Ljava/lang/String;
|
||||
public static final fun peekBinaryFormat (Lhep/dataforge/io/IOPlugin;Ljava/nio/file/Path;)Lhep/dataforge/io/EnvelopeFormat;
|
||||
public static final fun read (Ljava/nio/file/Path;Lkotlin/jvm/functions/Function1;)Ljava/lang/Object;
|
||||
public static final fun readEnvelope (Ljava/nio/file/Path;Lhep/dataforge/io/EnvelopeFormat;)Lhep/dataforge/io/Envelope;
|
||||
public static final fun readEnvelopeFile (Lhep/dataforge/io/IOPlugin;Ljava/nio/file/Path;ZLkotlin/jvm/functions/Function2;)Lhep/dataforge/io/Envelope;
|
||||
public static synthetic fun readEnvelopeFile$default (Lhep/dataforge/io/IOPlugin;Ljava/nio/file/Path;ZLkotlin/jvm/functions/Function2;ILjava/lang/Object;)Lhep/dataforge/io/Envelope;
|
||||
public static final fun readMetaFile (Lhep/dataforge/io/IOPlugin;Ljava/nio/file/Path;Lhep/dataforge/io/MetaFormat;Lhep/dataforge/meta/descriptors/NodeDescriptor;)Lhep/dataforge/meta/Meta;
|
||||
public static synthetic fun readMetaFile$default (Lhep/dataforge/io/IOPlugin;Ljava/nio/file/Path;Lhep/dataforge/io/MetaFormat;Lhep/dataforge/meta/descriptors/NodeDescriptor;ILjava/lang/Object;)Lhep/dataforge/meta/Meta;
|
||||
public static final fun rewrite (Ljava/nio/file/Path;Lkotlin/jvm/functions/Function1;)V
|
||||
public static final fun write (Ljava/nio/file/Path;Lkotlin/jvm/functions/Function1;)V
|
||||
public static final fun writeEnvelopeDirectory (Lhep/dataforge/io/IOPlugin;Ljava/nio/file/Path;Lhep/dataforge/io/Envelope;Lhep/dataforge/io/MetaFormatFactory;)V
|
||||
public static synthetic fun writeEnvelopeDirectory$default (Lhep/dataforge/io/IOPlugin;Ljava/nio/file/Path;Lhep/dataforge/io/Envelope;Lhep/dataforge/io/MetaFormatFactory;ILjava/lang/Object;)V
|
||||
public static final fun writeEnvelopeFile (Lhep/dataforge/io/IOPlugin;Ljava/nio/file/Path;Lhep/dataforge/io/Envelope;Lhep/dataforge/io/EnvelopeFormat;Lhep/dataforge/io/MetaFormatFactory;)V
|
||||
public static synthetic fun writeEnvelopeFile$default (Lhep/dataforge/io/IOPlugin;Ljava/nio/file/Path;Lhep/dataforge/io/Envelope;Lhep/dataforge/io/EnvelopeFormat;Lhep/dataforge/io/MetaFormatFactory;ILjava/lang/Object;)V
|
||||
public static final fun writeMetaFile (Lhep/dataforge/io/IOPlugin;Ljava/nio/file/Path;Lhep/dataforge/meta/Meta;Lhep/dataforge/io/MetaFormatFactory;Lhep/dataforge/meta/descriptors/NodeDescriptor;)V
|
||||
public static synthetic fun writeMetaFile$default (Lhep/dataforge/io/IOPlugin;Ljava/nio/file/Path;Lhep/dataforge/meta/Meta;Lhep/dataforge/io/MetaFormatFactory;Lhep/dataforge/meta/descriptors/NodeDescriptor;ILjava/lang/Object;)V
|
||||
public static final fun writeToFile (Lhep/dataforge/io/IOFormat;Ljava/nio/file/Path;Ljava/lang/Object;)V
|
||||
}
|
||||
|
||||
public abstract interface class hep/dataforge/io/IOFormat : hep/dataforge/meta/MetaRepr {
|
||||
public static final field Companion Lhep/dataforge/io/IOFormat$Companion;
|
||||
public abstract fun readObject (Lkotlinx/io/Input;)Ljava/lang/Object;
|
||||
public abstract fun writeObject (Lkotlinx/io/Output;Ljava/lang/Object;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/IOFormat$Companion {
|
||||
public final fun getMETA_KEY ()Lhep/dataforge/names/Name;
|
||||
public final fun getNAME_KEY ()Lhep/dataforge/names/Name;
|
||||
}
|
||||
|
||||
public abstract interface class hep/dataforge/io/IOFormatFactory : hep/dataforge/context/Factory, hep/dataforge/context/Named, hep/dataforge/meta/MetaRepr {
|
||||
public static final field Companion Lhep/dataforge/io/IOFormatFactory$Companion;
|
||||
public static final field IO_FORMAT_TYPE Ljava/lang/String;
|
||||
public abstract fun getType ()Lkotlin/reflect/KClass;
|
||||
public abstract fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/IOFormatFactory$Companion {
|
||||
public static final field IO_FORMAT_TYPE Ljava/lang/String;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/IOFormatFactory$DefaultImpls {
|
||||
public static fun toMeta (Lhep/dataforge/io/IOFormatFactory;)Lhep/dataforge/meta/Meta;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/IOFormatKt {
|
||||
public static final fun fill (Lkotlinx/io/pool/ObjectPool;Lkotlin/jvm/functions/Function1;)Ljava/nio/ByteBuffer;
|
||||
public static final fun readWith (Lkotlinx/io/Binary;Lhep/dataforge/io/IOFormat;)Ljava/lang/Object;
|
||||
public static final fun readWith (Lkotlinx/io/Input;Lhep/dataforge/io/IOFormat;)Ljava/lang/Object;
|
||||
public static final fun toBinary (Lhep/dataforge/io/IOFormat;Ljava/lang/Object;)Lkotlinx/io/Binary;
|
||||
public static final fun writeWith (Lkotlinx/io/Output;Lhep/dataforge/io/IOFormat;Ljava/lang/Object;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/IOPlugin : hep/dataforge/context/AbstractPlugin {
|
||||
public static final field Companion Lhep/dataforge/io/IOPlugin$Companion;
|
||||
public fun <init> (Lhep/dataforge/meta/Meta;)V
|
||||
public fun content (Ljava/lang/String;)Ljava/util/Map;
|
||||
public final fun getEnvelopeFormatFactories ()Ljava/util/Collection;
|
||||
public final fun getIoFormatFactories ()Ljava/util/Collection;
|
||||
public final fun getMetaFormatFactories ()Ljava/util/Collection;
|
||||
public fun getTag ()Lhep/dataforge/context/PluginTag;
|
||||
public final fun resolveEnvelopeFormat (Lhep/dataforge/meta/TypedMetaItem;)Lhep/dataforge/io/EnvelopeFormat;
|
||||
public final fun resolveIOFormat (Lhep/dataforge/meta/TypedMetaItem;Lkotlin/reflect/KClass;)Lhep/dataforge/io/IOFormat;
|
||||
public final fun resolveMetaFormat (Ljava/lang/String;Lhep/dataforge/meta/Meta;)Lhep/dataforge/io/MetaFormat;
|
||||
public final fun resolveMetaFormat (SLhep/dataforge/meta/Meta;)Lhep/dataforge/io/MetaFormat;
|
||||
public static synthetic fun resolveMetaFormat$default (Lhep/dataforge/io/IOPlugin;Ljava/lang/String;Lhep/dataforge/meta/Meta;ILjava/lang/Object;)Lhep/dataforge/io/MetaFormat;
|
||||
public static synthetic fun resolveMetaFormat$default (Lhep/dataforge/io/IOPlugin;SLhep/dataforge/meta/Meta;ILjava/lang/Object;)Lhep/dataforge/io/MetaFormat;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/IOPlugin$Companion : hep/dataforge/context/PluginFactory {
|
||||
public final fun getDefaultEnvelopeFormats ()Ljava/util/List;
|
||||
public final fun getDefaultMetaFormats ()Ljava/util/List;
|
||||
public fun getTag ()Lhep/dataforge/context/PluginTag;
|
||||
public fun getType ()Lkotlin/reflect/KClass;
|
||||
public fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Lhep/dataforge/io/IOPlugin;
|
||||
public synthetic fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Ljava/lang/Object;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/IOPluginKt {
|
||||
public static final fun getIo (Lhep/dataforge/context/Context;)Lhep/dataforge/io/IOPlugin;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/IoMiscKt {
|
||||
public static final fun Binary (ILkotlin/jvm/functions/Function1;)Lkotlinx/io/Binary;
|
||||
public static synthetic fun Binary$default (ILkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lkotlinx/io/Binary;
|
||||
public static final fun buildByteArray (ILkotlin/jvm/functions/Function1;)[B
|
||||
public static synthetic fun buildByteArray$default (ILkotlin/jvm/functions/Function1;ILjava/lang/Object;)[B
|
||||
public static final fun get (Lkotlinx/io/Binary;Lkotlin/ranges/IntRange;)Lhep/dataforge/io/BinaryView;
|
||||
public static final fun readRawString (Lkotlinx/io/Input;I)Ljava/lang/String;
|
||||
public static final fun view (Lkotlinx/io/Binary;II)Lhep/dataforge/io/BinaryView;
|
||||
public static final fun writeRawString (Lkotlinx/io/Output;Ljava/lang/String;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/JsonMetaFormat : hep/dataforge/io/MetaFormat {
|
||||
public static final field Companion Lhep/dataforge/io/JsonMetaFormat$Companion;
|
||||
public fun <init> ()V
|
||||
public fun <init> (Lkotlinx/serialization/json/Json;)V
|
||||
public synthetic fun <init> (Lkotlinx/serialization/json/Json;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
|
||||
public fun readMeta (Lkotlinx/io/Input;Lhep/dataforge/meta/descriptors/NodeDescriptor;)Lhep/dataforge/meta/Meta;
|
||||
public fun readObject (Lkotlinx/io/Input;)Lhep/dataforge/meta/Meta;
|
||||
public synthetic fun readObject (Lkotlinx/io/Input;)Ljava/lang/Object;
|
||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||
public fun writeMeta (Lkotlinx/io/Output;Lhep/dataforge/meta/Meta;Lhep/dataforge/meta/descriptors/NodeDescriptor;)V
|
||||
public fun writeObject (Lkotlinx/io/Output;Lhep/dataforge/meta/Meta;)V
|
||||
public synthetic fun writeObject (Lkotlinx/io/Output;Ljava/lang/Object;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/JsonMetaFormat$Companion : hep/dataforge/io/MetaFormatFactory {
|
||||
public final fun getDEFAULT_JSON ()Lkotlinx/serialization/json/Json;
|
||||
public fun getKey ()S
|
||||
public fun getName ()Lhep/dataforge/names/Name;
|
||||
public fun getShortName ()Ljava/lang/String;
|
||||
public fun getType ()Lkotlin/reflect/KClass;
|
||||
public fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Lhep/dataforge/io/MetaFormat;
|
||||
public synthetic fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Ljava/lang/Object;
|
||||
public fun readMeta (Lkotlinx/io/Input;Lhep/dataforge/meta/descriptors/NodeDescriptor;)Lhep/dataforge/meta/Meta;
|
||||
public fun readObject (Lkotlinx/io/Input;)Lhep/dataforge/meta/Meta;
|
||||
public synthetic fun readObject (Lkotlinx/io/Input;)Ljava/lang/Object;
|
||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||
public fun writeMeta (Lkotlinx/io/Output;Lhep/dataforge/meta/Meta;Lhep/dataforge/meta/descriptors/NodeDescriptor;)V
|
||||
public fun writeObject (Lkotlinx/io/Output;Lhep/dataforge/meta/Meta;)V
|
||||
public synthetic fun writeObject (Lkotlinx/io/Output;Ljava/lang/Object;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/ListIOFormat : hep/dataforge/io/IOFormat {
|
||||
public fun <init> (Lhep/dataforge/io/IOFormat;)V
|
||||
public final fun getFormat ()Lhep/dataforge/io/IOFormat;
|
||||
public synthetic fun readObject (Lkotlinx/io/Input;)Ljava/lang/Object;
|
||||
public fun readObject (Lkotlinx/io/Input;)Ljava/util/List;
|
||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||
public synthetic fun writeObject (Lkotlinx/io/Output;Ljava/lang/Object;)V
|
||||
public fun writeObject (Lkotlinx/io/Output;Ljava/util/List;)V
|
||||
}
|
||||
|
||||
public abstract interface class hep/dataforge/io/MetaFormat : hep/dataforge/io/IOFormat {
|
||||
public abstract fun readMeta (Lkotlinx/io/Input;Lhep/dataforge/meta/descriptors/NodeDescriptor;)Lhep/dataforge/meta/Meta;
|
||||
public abstract fun readObject (Lkotlinx/io/Input;)Lhep/dataforge/meta/Meta;
|
||||
public abstract fun writeMeta (Lkotlinx/io/Output;Lhep/dataforge/meta/Meta;Lhep/dataforge/meta/descriptors/NodeDescriptor;)V
|
||||
public abstract fun writeObject (Lkotlinx/io/Output;Lhep/dataforge/meta/Meta;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/MetaFormat$DefaultImpls {
|
||||
public static synthetic fun readMeta$default (Lhep/dataforge/io/MetaFormat;Lkotlinx/io/Input;Lhep/dataforge/meta/descriptors/NodeDescriptor;ILjava/lang/Object;)Lhep/dataforge/meta/Meta;
|
||||
public static fun readObject (Lhep/dataforge/io/MetaFormat;Lkotlinx/io/Input;)Lhep/dataforge/meta/Meta;
|
||||
public static synthetic fun writeMeta$default (Lhep/dataforge/io/MetaFormat;Lkotlinx/io/Output;Lhep/dataforge/meta/Meta;Lhep/dataforge/meta/descriptors/NodeDescriptor;ILjava/lang/Object;)V
|
||||
public static fun writeObject (Lhep/dataforge/io/MetaFormat;Lkotlinx/io/Output;Lhep/dataforge/meta/Meta;)V
|
||||
}
|
||||
|
||||
public abstract interface class hep/dataforge/io/MetaFormatFactory : hep/dataforge/io/IOFormatFactory, hep/dataforge/io/MetaFormat {
|
||||
public static final field Companion Lhep/dataforge/io/MetaFormatFactory$Companion;
|
||||
public static final field META_FORMAT_TYPE Ljava/lang/String;
|
||||
public abstract fun getKey ()S
|
||||
public abstract fun getName ()Lhep/dataforge/names/Name;
|
||||
public abstract fun getShortName ()Ljava/lang/String;
|
||||
public abstract fun getType ()Lkotlin/reflect/KClass;
|
||||
public abstract fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Lhep/dataforge/io/MetaFormat;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/MetaFormatFactory$Companion {
|
||||
public static final field META_FORMAT_TYPE Ljava/lang/String;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/MetaFormatFactory$DefaultImpls {
|
||||
public static fun getKey (Lhep/dataforge/io/MetaFormatFactory;)S
|
||||
public static fun getName (Lhep/dataforge/io/MetaFormatFactory;)Lhep/dataforge/names/Name;
|
||||
public static fun getType (Lhep/dataforge/io/MetaFormatFactory;)Lkotlin/reflect/KClass;
|
||||
public static fun readObject (Lhep/dataforge/io/MetaFormatFactory;Lkotlinx/io/Input;)Lhep/dataforge/meta/Meta;
|
||||
public static fun toMeta (Lhep/dataforge/io/MetaFormatFactory;)Lhep/dataforge/meta/Meta;
|
||||
public static fun writeObject (Lhep/dataforge/io/MetaFormatFactory;Lkotlinx/io/Output;Lhep/dataforge/meta/Meta;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/MetaFormatKt {
|
||||
public static final fun parse (Lhep/dataforge/io/MetaFormat;Ljava/lang/String;)Lhep/dataforge/meta/Meta;
|
||||
public static final fun parse (Lhep/dataforge/io/MetaFormatFactory;Ljava/lang/String;Lhep/dataforge/meta/Meta;)Lhep/dataforge/meta/Meta;
|
||||
public static final fun toString (Lhep/dataforge/meta/Meta;Lhep/dataforge/io/MetaFormat;)Ljava/lang/String;
|
||||
public static final fun toString (Lhep/dataforge/meta/Meta;Lhep/dataforge/io/MetaFormatFactory;)Ljava/lang/String;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/PartialEnvelope {
|
||||
public synthetic fun <init> (Lhep/dataforge/meta/Meta;ILkotlin/ULong;Lkotlin/jvm/internal/DefaultConstructorMarker;)V
|
||||
public final fun component1 ()Lhep/dataforge/meta/Meta;
|
||||
public final fun component2-pVg5ArA ()I
|
||||
public final fun component3-6VbMDqA ()Lkotlin/ULong;
|
||||
public final fun copy-BMK4sig (Lhep/dataforge/meta/Meta;ILkotlin/ULong;)Lhep/dataforge/io/PartialEnvelope;
|
||||
public static synthetic fun copy-BMK4sig$default (Lhep/dataforge/io/PartialEnvelope;Lhep/dataforge/meta/Meta;ILkotlin/ULong;ILjava/lang/Object;)Lhep/dataforge/io/PartialEnvelope;
|
||||
public fun equals (Ljava/lang/Object;)Z
|
||||
public final fun getDataOffset-pVg5ArA ()I
|
||||
public final fun getDataSize-6VbMDqA ()Lkotlin/ULong;
|
||||
public final fun getMeta ()Lhep/dataforge/meta/Meta;
|
||||
public fun hashCode ()I
|
||||
public fun toString ()Ljava/lang/String;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/ProxyEnvelope : hep/dataforge/io/Envelope {
|
||||
public fun <init> (Lhep/dataforge/io/Envelope;[Lhep/dataforge/meta/Meta;)V
|
||||
public fun getData ()Lkotlinx/io/Binary;
|
||||
public fun getMeta ()Lhep/dataforge/meta/Laminate;
|
||||
public synthetic fun getMeta ()Lhep/dataforge/meta/Meta;
|
||||
public final fun getSource ()Lhep/dataforge/io/Envelope;
|
||||
}
|
||||
|
||||
public abstract interface class hep/dataforge/io/Responder {
|
||||
public abstract fun respond (Lhep/dataforge/io/Envelope;Lkotlin/coroutines/Continuation;)Ljava/lang/Object;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/SimpleEnvelope : hep/dataforge/io/Envelope {
|
||||
public fun <init> (Lhep/dataforge/meta/Meta;Lkotlinx/io/Binary;)V
|
||||
public fun getData ()Lkotlinx/io/Binary;
|
||||
public fun getMeta ()Lhep/dataforge/meta/Meta;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/StreamsIOKt {
|
||||
public static final fun read (Ljava/io/InputStream;ILkotlin/jvm/functions/Function1;)Ljava/lang/Object;
|
||||
public static final fun read (Ljava/io/InputStream;Lkotlin/jvm/functions/Function1;)Ljava/lang/Object;
|
||||
public static final fun readBlocking (Ljava/io/InputStream;Lkotlin/jvm/functions/Function1;)Ljava/lang/Object;
|
||||
public static final fun write (Ljava/io/OutputStream;Lkotlin/jvm/functions/Function1;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/TaggedEnvelopeFormat : hep/dataforge/io/EnvelopeFormat {
|
||||
public static final field Companion Lhep/dataforge/io/TaggedEnvelopeFormat$Companion;
|
||||
public fun <init> (Lhep/dataforge/io/IOPlugin;Lhep/dataforge/io/TaggedEnvelopeFormat$VERSION;)V
|
||||
public synthetic fun <init> (Lhep/dataforge/io/IOPlugin;Lhep/dataforge/io/TaggedEnvelopeFormat$VERSION;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
|
||||
public fun getDefaultMetaFormat ()Lhep/dataforge/io/MetaFormatFactory;
|
||||
public final fun getIo ()Lhep/dataforge/io/IOPlugin;
|
||||
public final fun getVersion ()Lhep/dataforge/io/TaggedEnvelopeFormat$VERSION;
|
||||
public fun readObject (Lkotlinx/io/Input;)Lhep/dataforge/io/Envelope;
|
||||
public synthetic fun readObject (Lkotlinx/io/Input;)Ljava/lang/Object;
|
||||
public fun readPartial (Lkotlinx/io/Input;)Lhep/dataforge/io/PartialEnvelope;
|
||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||
public fun writeEnvelope (Lkotlinx/io/Output;Lhep/dataforge/io/Envelope;Lhep/dataforge/io/MetaFormatFactory;Lhep/dataforge/meta/Meta;)V
|
||||
public fun writeObject (Lkotlinx/io/Output;Lhep/dataforge/io/Envelope;)V
|
||||
public synthetic fun writeObject (Lkotlinx/io/Output;Ljava/lang/Object;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/TaggedEnvelopeFormat$Companion : hep/dataforge/io/EnvelopeFormatFactory {
|
||||
public fun getDefaultMetaFormat ()Lhep/dataforge/io/MetaFormatFactory;
|
||||
public fun getName ()Lhep/dataforge/names/Name;
|
||||
public fun getType ()Lkotlin/reflect/KClass;
|
||||
public fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Lhep/dataforge/io/EnvelopeFormat;
|
||||
public synthetic fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Ljava/lang/Object;
|
||||
public fun peekFormat (Lhep/dataforge/io/IOPlugin;Lkotlinx/io/Input;)Lhep/dataforge/io/EnvelopeFormat;
|
||||
public fun readObject (Lkotlinx/io/Input;)Lhep/dataforge/io/Envelope;
|
||||
public synthetic fun readObject (Lkotlinx/io/Input;)Ljava/lang/Object;
|
||||
public fun readPartial (Lkotlinx/io/Input;)Lhep/dataforge/io/PartialEnvelope;
|
||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||
public fun writeEnvelope (Lkotlinx/io/Output;Lhep/dataforge/io/Envelope;Lhep/dataforge/io/MetaFormatFactory;Lhep/dataforge/meta/Meta;)V
|
||||
public fun writeObject (Lkotlinx/io/Output;Lhep/dataforge/io/Envelope;)V
|
||||
public synthetic fun writeObject (Lkotlinx/io/Output;Ljava/lang/Object;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/TaggedEnvelopeFormat$VERSION : java/lang/Enum {
|
||||
public static final field DF02 Lhep/dataforge/io/TaggedEnvelopeFormat$VERSION;
|
||||
public static final field DF03 Lhep/dataforge/io/TaggedEnvelopeFormat$VERSION;
|
||||
public final fun getTagSize-pVg5ArA ()I
|
||||
public static fun valueOf (Ljava/lang/String;)Lhep/dataforge/io/TaggedEnvelopeFormat$VERSION;
|
||||
public static fun values ()[Lhep/dataforge/io/TaggedEnvelopeFormat$VERSION;
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/TaglessEnvelopeFormat : hep/dataforge/io/EnvelopeFormat {
|
||||
public static final field Companion Lhep/dataforge/io/TaglessEnvelopeFormat$Companion;
|
||||
public static final field DATA_LENGTH_PROPERTY Ljava/lang/String;
|
||||
public static final field DATA_START_PROPERTY Ljava/lang/String;
|
||||
public static final field DEFAULT_DATA_START Ljava/lang/String;
|
||||
public static final field DEFAULT_META_START Ljava/lang/String;
|
||||
public static final field META_LENGTH_PROPERTY Ljava/lang/String;
|
||||
public static final field META_START_PROPERTY Ljava/lang/String;
|
||||
public static final field META_TYPE_PROPERTY Ljava/lang/String;
|
||||
public static final field TAGLESS_ENVELOPE_HEADER Ljava/lang/String;
|
||||
public static final field TAGLESS_ENVELOPE_TYPE Ljava/lang/String;
|
||||
public static final field code I
|
||||
public fun <init> (Lhep/dataforge/io/IOPlugin;Lhep/dataforge/meta/Meta;)V
|
||||
public synthetic fun <init> (Lhep/dataforge/io/IOPlugin;Lhep/dataforge/meta/Meta;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
|
||||
public fun getDefaultMetaFormat ()Lhep/dataforge/io/MetaFormatFactory;
|
||||
public final fun getIo ()Lhep/dataforge/io/IOPlugin;
|
||||
public final fun getMeta ()Lhep/dataforge/meta/Meta;
|
||||
public fun readObject (Lkotlinx/io/Input;)Lhep/dataforge/io/Envelope;
|
||||
public synthetic fun readObject (Lkotlinx/io/Input;)Ljava/lang/Object;
|
||||
public fun readPartial (Lkotlinx/io/Input;)Lhep/dataforge/io/PartialEnvelope;
|
||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||
public fun writeEnvelope (Lkotlinx/io/Output;Lhep/dataforge/io/Envelope;Lhep/dataforge/io/MetaFormatFactory;Lhep/dataforge/meta/Meta;)V
|
||||
public fun writeObject (Lkotlinx/io/Output;Lhep/dataforge/io/Envelope;)V
|
||||
public synthetic fun writeObject (Lkotlinx/io/Output;Ljava/lang/Object;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/TaglessEnvelopeFormat$Companion : hep/dataforge/io/EnvelopeFormatFactory {
|
||||
public fun getDefaultMetaFormat ()Lhep/dataforge/io/MetaFormatFactory;
|
||||
public fun getName ()Lhep/dataforge/names/Name;
|
||||
public fun getType ()Lkotlin/reflect/KClass;
|
||||
public fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Lhep/dataforge/io/EnvelopeFormat;
|
||||
public synthetic fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Ljava/lang/Object;
|
||||
public fun peekFormat (Lhep/dataforge/io/IOPlugin;Lkotlinx/io/Input;)Lhep/dataforge/io/EnvelopeFormat;
|
||||
public fun readObject (Lkotlinx/io/Input;)Lhep/dataforge/io/Envelope;
|
||||
public synthetic fun readObject (Lkotlinx/io/Input;)Ljava/lang/Object;
|
||||
public fun readPartial (Lkotlinx/io/Input;)Lhep/dataforge/io/PartialEnvelope;
|
||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||
public fun writeEnvelope (Lkotlinx/io/Output;Lhep/dataforge/io/Envelope;Lhep/dataforge/io/MetaFormatFactory;Lhep/dataforge/meta/Meta;)V
|
||||
public fun writeObject (Lkotlinx/io/Output;Lhep/dataforge/io/Envelope;)V
|
||||
public synthetic fun writeObject (Lkotlinx/io/Output;Ljava/lang/Object;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/ValueIOFormat : hep/dataforge/io/IOFormat, hep/dataforge/io/IOFormatFactory {
|
||||
public static final field INSTANCE Lhep/dataforge/io/ValueIOFormat;
|
||||
public fun getName ()Lhep/dataforge/names/Name;
|
||||
public fun getType ()Lkotlin/reflect/KClass;
|
||||
public fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Lhep/dataforge/io/IOFormat;
|
||||
public synthetic fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Ljava/lang/Object;
|
||||
public fun readObject (Lkotlinx/io/Input;)Lhep/dataforge/values/Value;
|
||||
public synthetic fun readObject (Lkotlinx/io/Input;)Ljava/lang/Object;
|
||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||
public fun writeObject (Lkotlinx/io/Output;Lhep/dataforge/values/Value;)V
|
||||
public synthetic fun writeObject (Lkotlinx/io/Output;Ljava/lang/Object;)V
|
||||
}
|
||||
|
@ -1,31 +1,26 @@
|
||||
plugins {
|
||||
id("scientifik.mpp")
|
||||
id("space.kscience.gradle.mpp")
|
||||
}
|
||||
|
||||
description = "IO module"
|
||||
|
||||
scientifik{
|
||||
withSerialization()
|
||||
withIO()
|
||||
val ioVersion = "0.2.1"
|
||||
|
||||
kscience {
|
||||
jvm()
|
||||
js()
|
||||
native()
|
||||
useSerialization()
|
||||
useSerialization(sourceSet = space.kscience.gradle.DependencySourceSet.TEST) {
|
||||
cbor()
|
||||
}
|
||||
dependencies {
|
||||
api(projects.dataforgeContext)
|
||||
api("org.jetbrains.kotlinx:kotlinx-io-core:$ioVersion")
|
||||
api("org.jetbrains.kotlinx:kotlinx-io-bytestring:$ioVersion")
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
kotlin {
|
||||
sourceSets {
|
||||
commonMain{
|
||||
dependencies {
|
||||
api(project(":dataforge-context"))
|
||||
}
|
||||
}
|
||||
jvmMain{
|
||||
dependencies {
|
||||
|
||||
}
|
||||
}
|
||||
jsMain{
|
||||
dependencies{
|
||||
api(npm("text-encoding"))
|
||||
}
|
||||
}
|
||||
}
|
||||
readme{
|
||||
maturity = space.kscience.gradle.Maturity.EXPERIMENTAL
|
||||
}
|
23
dataforge-io/dataforge-io-yaml/README.md
Normal file
@ -0,0 +1,23 @@
|
||||
# Module dataforge-io-yaml
|
||||
|
||||
YAML meta IO
|
||||
|
||||
## Usage
|
||||
|
||||
## Artifact:
|
||||
|
||||
The Maven coordinates of this project are `space.kscience:dataforge-io-yaml:0.7.0`.
|
||||
|
||||
**Gradle Kotlin DSL:**
|
||||
```kotlin
|
||||
repositories {
|
||||
maven("https://repo.kotlin.link")
|
||||
//uncomment to access development builds
|
||||
//maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
|
||||
mavenCentral()
|
||||
}
|
||||
|
||||
dependencies {
|
||||
implementation("space.kscience:dataforge-io-yaml:0.7.0")
|
||||
}
|
||||
```
|
66
dataforge-io/dataforge-io-yaml/api/dataforge-io-yaml.api
Normal file
@ -0,0 +1,66 @@
|
||||
public final class hep/dataforge/io/yaml/FrontMatterEnvelopeFormat : hep/dataforge/io/EnvelopeFormat {
|
||||
public static final field Companion Lhep/dataforge/io/yaml/FrontMatterEnvelopeFormat$Companion;
|
||||
public static final field SEPARATOR Ljava/lang/String;
|
||||
public fun <init> (Lhep/dataforge/io/IOPlugin;Lhep/dataforge/meta/Meta;)V
|
||||
public synthetic fun <init> (Lhep/dataforge/io/IOPlugin;Lhep/dataforge/meta/Meta;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
|
||||
public fun getDefaultMetaFormat ()Lhep/dataforge/io/MetaFormatFactory;
|
||||
public fun readObject (Lkotlinx/io/Input;)Lhep/dataforge/io/Envelope;
|
||||
public synthetic fun readObject (Lkotlinx/io/Input;)Ljava/lang/Object;
|
||||
public fun readPartial (Lkotlinx/io/Input;)Lhep/dataforge/io/PartialEnvelope;
|
||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||
public fun writeEnvelope (Lkotlinx/io/Output;Lhep/dataforge/io/Envelope;Lhep/dataforge/io/MetaFormatFactory;Lhep/dataforge/meta/Meta;)V
|
||||
public fun writeObject (Lkotlinx/io/Output;Lhep/dataforge/io/Envelope;)V
|
||||
public synthetic fun writeObject (Lkotlinx/io/Output;Ljava/lang/Object;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/yaml/FrontMatterEnvelopeFormat$Companion : hep/dataforge/io/EnvelopeFormatFactory {
|
||||
public fun getDefaultMetaFormat ()Lhep/dataforge/io/MetaFormatFactory;
|
||||
public fun getName ()Lhep/dataforge/names/Name;
|
||||
public fun getType ()Lkotlin/reflect/KClass;
|
||||
public fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Lhep/dataforge/io/EnvelopeFormat;
|
||||
public synthetic fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Ljava/lang/Object;
|
||||
public fun peekFormat (Lhep/dataforge/io/IOPlugin;Lkotlinx/io/Input;)Lhep/dataforge/io/EnvelopeFormat;
|
||||
public fun readObject (Lkotlinx/io/Input;)Lhep/dataforge/io/Envelope;
|
||||
public synthetic fun readObject (Lkotlinx/io/Input;)Ljava/lang/Object;
|
||||
public fun readPartial (Lkotlinx/io/Input;)Lhep/dataforge/io/PartialEnvelope;
|
||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||
public fun writeEnvelope (Lkotlinx/io/Output;Lhep/dataforge/io/Envelope;Lhep/dataforge/io/MetaFormatFactory;Lhep/dataforge/meta/Meta;)V
|
||||
public fun writeObject (Lkotlinx/io/Output;Lhep/dataforge/io/Envelope;)V
|
||||
public synthetic fun writeObject (Lkotlinx/io/Output;Ljava/lang/Object;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/yaml/YamlMetaFormat : hep/dataforge/io/MetaFormat {
|
||||
public static final field Companion Lhep/dataforge/io/yaml/YamlMetaFormat$Companion;
|
||||
public fun <init> (Lhep/dataforge/meta/Meta;)V
|
||||
public fun readMeta (Lkotlinx/io/Input;Lhep/dataforge/meta/descriptors/NodeDescriptor;)Lhep/dataforge/meta/Meta;
|
||||
public fun readObject (Lkotlinx/io/Input;)Lhep/dataforge/meta/Meta;
|
||||
public synthetic fun readObject (Lkotlinx/io/Input;)Ljava/lang/Object;
|
||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||
public fun writeMeta (Lkotlinx/io/Output;Lhep/dataforge/meta/Meta;Lhep/dataforge/meta/descriptors/NodeDescriptor;)V
|
||||
public fun writeObject (Lkotlinx/io/Output;Lhep/dataforge/meta/Meta;)V
|
||||
public synthetic fun writeObject (Lkotlinx/io/Output;Ljava/lang/Object;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/yaml/YamlMetaFormat$Companion : hep/dataforge/io/MetaFormatFactory {
|
||||
public fun getKey ()S
|
||||
public fun getName ()Lhep/dataforge/names/Name;
|
||||
public fun getShortName ()Ljava/lang/String;
|
||||
public fun getType ()Lkotlin/reflect/KClass;
|
||||
public fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Lhep/dataforge/io/MetaFormat;
|
||||
public synthetic fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Ljava/lang/Object;
|
||||
public fun readMeta (Lkotlinx/io/Input;Lhep/dataforge/meta/descriptors/NodeDescriptor;)Lhep/dataforge/meta/Meta;
|
||||
public fun readObject (Lkotlinx/io/Input;)Lhep/dataforge/meta/Meta;
|
||||
public synthetic fun readObject (Lkotlinx/io/Input;)Ljava/lang/Object;
|
||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||
public fun writeMeta (Lkotlinx/io/Output;Lhep/dataforge/meta/Meta;Lhep/dataforge/meta/descriptors/NodeDescriptor;)V
|
||||
public fun writeObject (Lkotlinx/io/Output;Lhep/dataforge/meta/Meta;)V
|
||||
public synthetic fun writeObject (Lkotlinx/io/Output;Ljava/lang/Object;)V
|
||||
}
|
||||
|
||||
public final class hep/dataforge/io/yaml/YamlMetaFormatKt {
|
||||
public static final fun toMeta (Lnet/mamoe/yamlkt/YamlMap;)Lhep/dataforge/meta/Meta;
|
||||
public static final fun toMetaItem (Lnet/mamoe/yamlkt/YamlElement;Lhep/dataforge/meta/descriptors/ItemDescriptor;)Lhep/dataforge/meta/TypedMetaItem;
|
||||
public static synthetic fun toMetaItem$default (Lnet/mamoe/yamlkt/YamlElement;Lhep/dataforge/meta/descriptors/ItemDescriptor;ILjava/lang/Object;)Lhep/dataforge/meta/TypedMetaItem;
|
||||
public static final fun toYaml (Lhep/dataforge/meta/Meta;)Lnet/mamoe/yamlkt/YamlMap;
|
||||
}
|
||||
|
@ -1,10 +1,24 @@
|
||||
plugins {
|
||||
id("scientifik.jvm")
|
||||
id("space.kscience.gradle.mpp")
|
||||
}
|
||||
|
||||
description = "YAML meta IO"
|
||||
|
||||
dependencies {
|
||||
api(project(":dataforge-io"))
|
||||
api("org.yaml:snakeyaml:1.25")
|
||||
kscience {
|
||||
jvm()
|
||||
js()
|
||||
native()
|
||||
dependencies {
|
||||
api(projects.dataforgeIo)
|
||||
}
|
||||
useSerialization{
|
||||
yamlKt()
|
||||
}
|
||||
}
|
||||
|
||||
readme{
|
||||
maturity = space.kscience.gradle.Maturity.PROTOTYPE
|
||||
description ="""
|
||||
YAML meta converters and Front Matter envelope format
|
||||
""".trimIndent()
|
||||
}
|
||||
|
@ -0,0 +1,97 @@
|
||||
package space.kscience.dataforge.io.yaml
|
||||
|
||||
import kotlinx.io.Sink
|
||||
import kotlinx.io.Source
|
||||
import kotlinx.io.bytestring.ByteString
|
||||
import kotlinx.io.bytestring.encodeToByteString
|
||||
import kotlinx.io.readByteString
|
||||
import kotlinx.io.writeString
|
||||
import space.kscience.dataforge.context.Context
|
||||
import space.kscience.dataforge.context.Global
|
||||
import space.kscience.dataforge.io.*
|
||||
import space.kscience.dataforge.meta.Meta
|
||||
import space.kscience.dataforge.names.Name
|
||||
import space.kscience.dataforge.names.plus
|
||||
|
||||
public class FrontMatterEnvelopeFormat(
|
||||
private val io: IOPlugin,
|
||||
private val meta: Meta = Meta.EMPTY,
|
||||
private val metaFormatFactory: MetaFormatFactory = YamlMetaFormat,
|
||||
) : EnvelopeFormat {
|
||||
|
||||
override fun readFrom(binary: Binary): Envelope = binary.read {
|
||||
var offset = 0
|
||||
|
||||
offset += discardWithSeparator(
|
||||
SEPARATOR,
|
||||
atMost = 1024,
|
||||
)
|
||||
|
||||
val line = ByteArray {
|
||||
offset += readWithSeparatorTo(this, "\n".encodeToByteString())
|
||||
}.decodeToString()
|
||||
|
||||
val readMetaFormat = line.trim().takeIf { it.isNotBlank() }?.let { io.resolveMetaFormat(it) } ?: YamlMetaFormat
|
||||
|
||||
val packet = ByteArray {
|
||||
offset += readWithSeparatorTo(this, SEPARATOR)
|
||||
}
|
||||
|
||||
offset += discardLine()
|
||||
|
||||
val meta = readMetaFormat.readFrom(packet.asBinary())
|
||||
Envelope(meta, binary.view(offset))
|
||||
}
|
||||
|
||||
override fun readFrom(source: Source): Envelope = readFrom(source.readBinary())
|
||||
|
||||
override fun writeTo(
|
||||
sink: Sink,
|
||||
obj: Envelope,
|
||||
) {
|
||||
val metaFormat = metaFormatFactory.build(io.context, meta)
|
||||
val formatSuffix = if (metaFormat is YamlMetaFormat) "" else metaFormatFactory.shortName
|
||||
sink.writeString("$SEPARATOR${formatSuffix}\r\n")
|
||||
metaFormat.run { metaFormat.writeTo(sink, obj.meta) }
|
||||
sink.writeString("$SEPARATOR\r\n")
|
||||
//Printing data
|
||||
obj.data?.let { data ->
|
||||
sink.writeBinary(data)
|
||||
}
|
||||
}
|
||||
|
||||
public companion object : EnvelopeFormatFactory {
|
||||
public val SEPARATOR: ByteString = "---".encodeToByteString()
|
||||
|
||||
private val metaTypeRegex = "---(\\w*)\\s*".toRegex()
|
||||
|
||||
override val name: Name = EnvelopeFormatFactory.ENVELOPE_FACTORY_NAME + "frontMatter"
|
||||
|
||||
override fun build(context: Context, meta: Meta): EnvelopeFormat {
|
||||
return FrontMatterEnvelopeFormat(context.io, meta)
|
||||
}
|
||||
|
||||
override fun peekFormat(io: IOPlugin, binary: Binary): EnvelopeFormat? = binary.read {
|
||||
//read raw string to avoid UTF issues
|
||||
val line = readByteString(3)
|
||||
return@read if (line == "---".encodeToByteString()) {
|
||||
default
|
||||
} else {
|
||||
null
|
||||
}
|
||||
}
|
||||
|
||||
private val default by lazy { build(Global, Meta.EMPTY) }
|
||||
|
||||
override fun readFrom(binary: Binary): Envelope = default.readFrom(binary)
|
||||
|
||||
override fun writeTo(
|
||||
sink: Sink,
|
||||
obj: Envelope,
|
||||
): Unit = default.writeTo(sink, obj)
|
||||
|
||||
|
||||
override fun readFrom(source: Source): Envelope = default.readFrom(source)
|
||||
|
||||
}
|
||||
}
|