Compare commits
237 Commits
Author | SHA1 | Date | |
---|---|---|---|
c754dc3471 | |||
8f7d754301 | |||
c923c3e7d3 | |||
99b2d941c8 | |||
191af77f57 | |||
79759c5256 | |||
2eb965e563 | |||
1b29e377ca | |||
2634a19285 | |||
261c415d3d | |||
e52d509c2b | |||
706521a6b6 | |||
94000689da | |||
851fdda311 | |||
cbbcd18df3 | |||
dc2bf5da83 | |||
f732b85cc5 | |||
259b882e63 | |||
526f230300 | |||
|
7fca5db390 | ||
|
be160ba98a | ||
|
182f206b88 | ||
3806f97c77 | |||
d5ebef404f | |||
3644533043 | |||
ee5afcdafe | |||
de476fb273 | |||
a136db16ff | |||
a699c36f8e | |||
2aba1b48dc | |||
cfa20eedba | |||
|
f78f0f814f | ||
4e7ead0763 | |||
4543648cda | |||
b6949310ea | |||
2c2f33427a | |||
29fa30fb51 | |||
f3afb5e9fe | |||
61c8df9eb0 | |||
707b59e6fc | |||
add400b324 | |||
58c5355e25 | |||
f83b759e75 | |||
7d88f828d7 | |||
5d7ddb4e00 | |||
82838b6a92 | |||
e41fdfc086 | |||
4117a05df4 | |||
5406a6a64c | |||
0cc4dc0db7 | |||
233639f0b6 | |||
70bd92f019 | |||
f8eea45ed0 | |||
e14c0a695e | |||
3c1fe23366 | |||
81e2ad06cc | |||
6ca76cff17 | |||
9a24e1e392 | |||
9cceb44a90 | |||
0b68c1edae | |||
b8869570ce | |||
4833128857 | |||
6bd8a7acbc | |||
f5d32ba511 | |||
0fc2198832 | |||
a546552540 | |||
fe92e8fccf | |||
7d9189e15c | |||
0622bacc4d | |||
bedab0dc86 | |||
f0820a3bed | |||
665f317e4e | |||
82d37f4b55 | |||
6d396368b7 | |||
77857289f0 | |||
eaa9d40d60 | |||
6b41163ed3 | |||
e5000171f1 | |||
3c6bc15716 | |||
11143e4ba1 | |||
91621864c2 | |||
1e97165328 | |||
|
be8e971436 | ||
|
9cc30b1f4e | ||
|
7414e60192 | ||
|
8c0bc05a9a | ||
c480cd8e4d | |||
64e0c554cc | |||
532e0c253b | |||
c423dc214e | |||
d178c4ff0d | |||
|
387ab8747e | ||
3f54eee578 | |||
aded38254e | |||
00d964eef3 | |||
b07d281a83 | |||
81cdd38c40 | |||
0ad6852e36 | |||
a71bb732da | |||
|
acfe9c2f74 | ||
ce8074c104 | |||
922a3b07ee | |||
81abbe28a9 | |||
da9d6e7639 | |||
66c708d9fb | |||
c1065c2885 | |||
e5f422f9ca | |||
c01bc36d41 | |||
be2daca25e | |||
b968d735ce | |||
67554a8c98 | |||
24187722e4 | |||
d3c129526d | |||
5632487dca | |||
e432b07201 | |||
28a6914747 | |||
90a92c4121 | |||
b404615145 | |||
7aec2f3547 | |||
3ba5a9076b | |||
8763d63e28 | |||
4a76063093 | |||
5fbbac465a | |||
b387b21554 | |||
c8bd3390cb | |||
a3479e74f7 | |||
9f5b010847 | |||
679175391a | |||
14455c2b2b | |||
a9cec666a3 | |||
d2ea1a975e | |||
73b3bbe7fc | |||
474597777c | |||
9d3c7149b7 | |||
bc9cd3b5a8 | |||
254163bdef | |||
|
3bdaf332cb | ||
|
6e769d1089 | ||
|
e62ff61814 | ||
1037c45c0d | |||
82b328f797 | |||
a6f1e54255 | |||
3888b2d9e7 | |||
0acb6ec448 | |||
|
b616e3ad6d | ||
|
d1381cc98c | ||
|
543023d2df | ||
|
d769b0d389 | ||
|
3fc698dd09 | ||
|
32b986fc47 | ||
3e8421187f | |||
5afe0523f1 | |||
a7ecbfb763 | |||
2352f1cff1 | |||
1a983665f8 | |||
53393e7958 | |||
874a253292 | |||
187094d942 | |||
23f1d4f7fd | |||
f1f5f7a70c | |||
9fbc482549 | |||
|
b86c6141cd | ||
ddd2bd99be | |||
|
e13e3ab6bf | ||
bdef0d9185 | |||
6bdcd0f37d | |||
18bfde9978 | |||
64100dcfe4 | |||
3f0d088c48 | |||
b6dad141f8 | |||
f3d43cd40a | |||
c98ffd1eb4 | |||
488cd5a939 | |||
cf0c934acf | |||
|
a7ee2f5922 | ||
a726307641 | |||
db4ed02f9d | |||
2b945d4a78 | |||
|
f6210fde7f | ||
5d02520904 | |||
7d3df24568 | |||
1970243785 | |||
03337f00f0 | |||
81fb064d38 | |||
730ac69544 | |||
fcd99b1ca8 | |||
6a0bfae931 | |||
11ba116a89 | |||
4d19d97c53 | |||
e66ae408cd | |||
0e053ab78d | |||
66355793ee | |||
e88178ffe7 | |||
25cba0c6de | |||
2291072e26 | |||
80d3a64cdf | |||
9dacd98f64 | |||
366d32a04a | |||
ac8631e3a0 | |||
7e4d1af55f | |||
3334380693 | |||
b916a038f7 | |||
23fae9794f | |||
9ed4245d84 | |||
13c0d189bb | |||
8b1d5eb69e | |||
030f3ed6fe | |||
027d5ed923 | |||
f946777f40 | |||
7a9f7da7f2 | |||
81375d4644 | |||
9c3f9420ea | |||
adf65a5bde | |||
460dc77d51 | |||
76968f07e5 | |||
6389a25aaf | |||
1c89543d73 | |||
397a19fb32 | |||
e931994b75 | |||
617ed13efa | |||
89f0d627b8 | |||
702589f7b3 | |||
1f773cc230 | |||
95e6925d55 | |||
62e08a1b75 | |||
18407424fe | |||
4c5f0e9e43 | |||
7c4d69ec1b | |||
e317b67a48 | |||
2ba4121a36 | |||
eb16294a7e | |||
17c9bf3d54 | |||
221cc65b78 | |||
2098d96561 | |||
4c98d62e8f | |||
47d49d1e0e | |||
1b46d00a91 |
41
.github/workflows/build.yml
vendored
Normal file
41
.github/workflows/build.yml
vendored
Normal file
@ -0,0 +1,41 @@
|
|||||||
|
name: Gradle build
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [ dev, master ]
|
||||||
|
pull_request:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
os: [ macOS-latest, windows-latest ]
|
||||||
|
runs-on: ${{matrix.os}}
|
||||||
|
timeout-minutes: 40
|
||||||
|
steps:
|
||||||
|
- name: Checkout the repo
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up JDK 11
|
||||||
|
uses: DeLaGuardo/setup-graalvm@4.0
|
||||||
|
with:
|
||||||
|
graalvm: 21.2.0
|
||||||
|
java: java11
|
||||||
|
arch: amd64
|
||||||
|
- name: Cache gradle
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
~/.gradle/caches
|
||||||
|
~/.gradle/wrapper
|
||||||
|
key: ${{ runner.os }}-gradle-${{ hashFiles('*.gradle.kts') }}
|
||||||
|
restore-keys: |
|
||||||
|
${{ runner.os }}-gradle-
|
||||||
|
- name: Cache konan
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: ~/.konan
|
||||||
|
key: ${{ runner.os }}-gradle-${{ hashFiles('*.gradle.kts') }}
|
||||||
|
restore-keys: |
|
||||||
|
${{ runner.os }}-gradle-
|
||||||
|
- name: Build
|
||||||
|
run: ./gradlew build --build-cache --no-daemon --stacktrace
|
17
.github/workflows/gradle.yml
vendored
17
.github/workflows/gradle.yml
vendored
@ -1,17 +0,0 @@
|
|||||||
name: Gradle build
|
|
||||||
|
|
||||||
on: [push]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v1
|
|
||||||
- name: Set up JDK 11
|
|
||||||
uses: actions/setup-java@v1
|
|
||||||
with:
|
|
||||||
java-version: 11
|
|
||||||
- name: Build with Gradle
|
|
||||||
run: ./gradlew build
|
|
31
.github/workflows/pages.yml
vendored
Normal file
31
.github/workflows/pages.yml
vendored
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
name: Dokka publication
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
release:
|
||||||
|
types: [ created ]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
timeout-minutes: 40
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3.0.0
|
||||||
|
- uses: actions/setup-java@v3.0.0
|
||||||
|
with:
|
||||||
|
java-version: 11
|
||||||
|
distribution: liberica
|
||||||
|
- name: Cache konan
|
||||||
|
uses: actions/cache@v3.0.1
|
||||||
|
with:
|
||||||
|
path: ~/.konan
|
||||||
|
key: ${{ runner.os }}-gradle-${{ hashFiles('*.gradle.kts') }}
|
||||||
|
restore-keys: |
|
||||||
|
${{ runner.os }}-gradle-
|
||||||
|
- uses: gradle/gradle-build-action@v2.4.2
|
||||||
|
with:
|
||||||
|
arguments: dokkaHtmlMultiModule --no-parallel
|
||||||
|
- uses: JamesIves/github-pages-deploy-action@v4.3.0
|
||||||
|
with:
|
||||||
|
branch: gh-pages
|
||||||
|
folder: build/dokka/htmlMultiModule
|
50
.github/workflows/publish.yml
vendored
Normal file
50
.github/workflows/publish.yml
vendored
Normal file
@ -0,0 +1,50 @@
|
|||||||
|
name: Gradle publish
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
release:
|
||||||
|
types: [ created ]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
publish:
|
||||||
|
environment:
|
||||||
|
name: publish
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
os: [ macOS-latest, windows-latest ]
|
||||||
|
runs-on: ${{matrix.os}}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3.0.0
|
||||||
|
- uses: actions/setup-java@v3.10.0
|
||||||
|
with:
|
||||||
|
java-version: 11
|
||||||
|
distribution: liberica
|
||||||
|
- name: Cache konan
|
||||||
|
uses: actions/cache@v3.0.1
|
||||||
|
with:
|
||||||
|
path: ~/.konan
|
||||||
|
key: ${{ runner.os }}-gradle-${{ hashFiles('*.gradle.kts') }}
|
||||||
|
restore-keys: |
|
||||||
|
${{ runner.os }}-gradle-
|
||||||
|
- name: Publish Windows Artifacts
|
||||||
|
if: matrix.os == 'windows-latest'
|
||||||
|
uses: gradle/gradle-build-action@v2.4.2
|
||||||
|
with:
|
||||||
|
arguments: |
|
||||||
|
publishAllPublicationsToSpaceRepository
|
||||||
|
-Ppublishing.targets=all
|
||||||
|
-Ppublishing.space.user=${{ secrets.SPACE_APP_ID }}
|
||||||
|
-Ppublishing.space.token=${{ secrets.SPACE_APP_SECRET }}
|
||||||
|
- name: Publish Mac Artifacts
|
||||||
|
if: matrix.os == 'macOS-latest'
|
||||||
|
uses: gradle/gradle-build-action@v2.4.2
|
||||||
|
with:
|
||||||
|
arguments: |
|
||||||
|
publishMacosX64PublicationToSpaceRepository
|
||||||
|
publishMacosArm64PublicationToSpaceRepository
|
||||||
|
publishIosX64PublicationToSpaceRepository
|
||||||
|
publishIosArm64PublicationToSpaceRepository
|
||||||
|
publishIosSimulatorArm64PublicationToSpaceRepository
|
||||||
|
-Ppublishing.targets=all
|
||||||
|
-Ppublishing.space.user=${{ secrets.SPACE_APP_ID }}
|
||||||
|
-Ppublishing.space.token=${{ secrets.SPACE_APP_SECRET }}
|
186
CHANGELOG.md
186
CHANGELOG.md
@ -1,6 +1,7 @@
|
|||||||
# Changelog
|
# Changelog
|
||||||
|
|
||||||
## [Unreleased]
|
## Unreleased
|
||||||
|
|
||||||
### Added
|
### Added
|
||||||
|
|
||||||
### Changed
|
### Changed
|
||||||
@ -12,10 +13,186 @@
|
|||||||
### Fixed
|
### Fixed
|
||||||
|
|
||||||
### Security
|
### Security
|
||||||
## [0.2.0]
|
|
||||||
|
## 0.7.0 - 2023-11-26
|
||||||
|
|
||||||
### Added
|
### Added
|
||||||
|
|
||||||
|
- Obligatory `type: KType` and `descriptor` property for `MetaConverters`
|
||||||
|
- Added separate `Meta`, `SealedMeta` and `ObservableMutableMeta` builders.
|
||||||
|
|
||||||
### Changed
|
### Changed
|
||||||
|
|
||||||
|
- Meta converter `metaToObject` returns a non-nullable type. Additional method `metaToObjectOrNull` for nullable return.
|
||||||
|
- Kotlin 1.9.20.
|
||||||
|
- Migrated from ktor-io to kotlinx-io.
|
||||||
|
- `MutableMeta` builder now returns a simplified version of meta that does not hold listeners.
|
||||||
|
- More concise names for read/write methods in IO.
|
||||||
|
- Remove unnecessary confusion with `get`/`getMeta` by removing `getMeta` from the interface.
|
||||||
|
|
||||||
|
### Deprecated
|
||||||
|
|
||||||
|
- `String.parseValue` is replaced with `Value.parse`
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Memory leak in SealedMeta builder
|
||||||
|
|
||||||
|
## 0.6.2 - 2023-07-29
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Meta to Json serializer now serializes a single item with index as an array. It is important for plotly integration.
|
||||||
|
- Meta to Json serializes Meta without children a value as literal or array instead of an object with `@value` field.
|
||||||
|
|
||||||
|
## 0.6.1 - 2023-03-31
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- File cache for workspace
|
||||||
|
- Smart task metadata transformation for workspace
|
||||||
|
- Add `readOnly` property to descriptors
|
||||||
|
- Add `specOrNull` delegate to meta and Scheme
|
||||||
|
- Suspended read methods to the `Binary`
|
||||||
|
- Synchronously accessed `meta` to all `DataSet`s
|
||||||
|
- More fine-grained types in Action builders.
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- `Name::replaceLast` API
|
||||||
|
- `PluginFactory` no longer requires plugin class
|
||||||
|
- Collection<Named> toMap -> associateByName
|
||||||
|
- Simplified `DFTL` envelope format. Closing symbols are unnecessary. Properties are discontinued.
|
||||||
|
- Meta `get` method allows nullable receiver
|
||||||
|
- `withDefault` functions do not add new keys to meta children and are consistent.
|
||||||
|
- `dataforge.meta.values` package is merged into `dataforge.meta` for better star imports
|
||||||
|
- Kotlin 1.8.20
|
||||||
|
- `Factory` is now `fun interface` and uses `build` instead of `invoke`. `invoke moved to an extension.
|
||||||
|
- KTor 2.0
|
||||||
|
- DataTree `items` call is blocking.
|
||||||
|
- DataSet `getData` is no longer suspended and renamed to `get`
|
||||||
|
- DataSet operates with sequences of data instead of flows
|
||||||
|
- PartialEnvelope uses `Int` instead `UInt`.
|
||||||
|
- `ActiveDataSet` renamed to `DataSource`
|
||||||
|
- `selectOne`->`getByType`
|
||||||
|
- Data traversal in `DataSet` is done via iterator
|
||||||
|
- Remove all unnecessary properties for `IOFormat`
|
||||||
|
- Separate interfaces for `IOReader` and `IOWriter`
|
||||||
|
|
||||||
|
### Deprecated
|
||||||
|
|
||||||
|
- Context.fetch -> Context.request
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- `readDataDirectory` does not split names with dots
|
||||||
|
- Front matter reader does not crash on non-UTF files
|
||||||
|
- Meta file name in readMeta from directory
|
||||||
|
- Tagless and FrontMatter envelope partial readers fix.
|
||||||
|
|
||||||
|
## 0.5.2
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- Yaml plugin
|
||||||
|
- Partial fix to #53
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- MutableMetaImpl attachment and checks
|
||||||
|
- Listeners in observable meta are replaced by lists
|
||||||
|
- JS number comparison bug.
|
||||||
|
|
||||||
|
## 0.5.0
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- Experimental `listOfSpec` delegate.
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- **API breaking** Config is deprecated, use `ObservableMeta` instead.
|
||||||
|
- **API breaking** Descriptor no has a member property `defaultValue` instead of `defaultItem()` extension. It caches default value state on the first call. It is done because computing default on each call is too expensive.
|
||||||
|
- Kotlin 1.5.10
|
||||||
|
- Build tools 0.10.0
|
||||||
|
- Relaxed type restriction on `MetaConverter`. Now nullables are available.
|
||||||
|
- **Huge API-breaking refactoring of Meta**. Meta now can have both value and children. There is only one kind of descriptor now.
|
||||||
|
- **API breaking** `String.toName()` is replaced by `Name.parse()`
|
||||||
|
- **API breaking** Configurable`config` changed to `meta`
|
||||||
|
|
||||||
|
### Removed
|
||||||
|
|
||||||
|
- `Config`
|
||||||
|
- Public PluginManager mutability
|
||||||
|
- Tables and tables-exposed moved to the separate project `tables.kt`
|
||||||
|
- BinaryMetaFormat. Use CBOR encoding instead
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Proper json array index treatment.
|
||||||
|
- Proper json index for single-value array.
|
||||||
|
|
||||||
|
## 0.4.0
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- LogManager plugin
|
||||||
|
- dataforge-context API dependency on SLF4j
|
||||||
|
- Context `withEnv` and `fetch` methods to manipulate plugins without changing plugins after creation.
|
||||||
|
- Split `ItemDescriptor` into builder and read-only part
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Kotlin-logging moved from common to JVM and JS. Replaced by console for native.
|
||||||
|
- Package changed to `space.kscience`
|
||||||
|
- Scheme made observable
|
||||||
|
- Global context is a variable (the singleton is hidden and will be deprecated in future)
|
||||||
|
- Kotlin 1.5
|
||||||
|
- Added blank builders for children context.
|
||||||
|
- Refactor loggers
|
||||||
|
|
||||||
|
### Deprecated
|
||||||
|
|
||||||
|
- Direct use of PluginManager
|
||||||
|
|
||||||
|
### Removed
|
||||||
|
|
||||||
|
- Common dependency on Kotlin-logging
|
||||||
|
- Kotlinx-io fork dependency. Replaced by Ktor-io.
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Scheme properties properly handle children property change.
|
||||||
|
|
||||||
|
## 0.3.0
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- Yaml meta format based on yaml.kt
|
||||||
|
- `Path` builders
|
||||||
|
- Special ValueType for lists
|
||||||
|
- `copy` method to descriptors
|
||||||
|
- Multiplatform yaml meta
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- `ListValue` and `DoubleArrayValue` implement `Iterable`.
|
||||||
|
- Changed the logic of `Value::isList` to check for type instead of size
|
||||||
|
- `Meta{}` builder made inline
|
||||||
|
- Moved `Envelope` builder to a top level function. Companion invoke is deprecated.
|
||||||
|
- Context logging moved to the extension
|
||||||
|
- `number` and `string` methods on `Value` moved to extensions (breaking change)
|
||||||
|
- \[Major breaking change\] Schemes and configurables us `MutableItemProvider` instead of `Config`
|
||||||
|
- \[Major breaking change\] `MetaItem` renamed to `TypedMetaItem` and `MetaItem` is now an alias for `TypedMetaItem<*>`
|
||||||
|
- \[Major breaking change\] Moved `NodeItem` and `ValueItem` to a top level
|
||||||
|
- Plugins are removed from Context constructor and added lazily in ContextBuilder
|
||||||
|
- \[Major breaking change\] Full refactor of DataTree/DataSource
|
||||||
|
- \[Major Breaking change\] Replace KClass with KType in data. Remove direct access to constructors with types.
|
||||||
|
|
||||||
|
## 0.2.0
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
- Context content resolution refactor
|
- Context content resolution refactor
|
||||||
- Kotlin 1.4.10 (build tools 0.6.0)
|
- Kotlin 1.4.10 (build tools 0.6.0)
|
||||||
- Empty query in Name is null instead of ""
|
- Empty query in Name is null instead of ""
|
||||||
@ -25,15 +202,16 @@
|
|||||||
- Configurable is no longer MutableItemProvider. All functionality moved to Scheme.
|
- Configurable is no longer MutableItemProvider. All functionality moved to Scheme.
|
||||||
|
|
||||||
### Deprecated
|
### Deprecated
|
||||||
|
|
||||||
- Context activation API
|
- Context activation API
|
||||||
- TextRenderer
|
- TextRenderer
|
||||||
|
|
||||||
### Removed
|
### Removed
|
||||||
|
|
||||||
- Functional server prototype
|
- Functional server prototype
|
||||||
- `dataforge-output` module
|
- `dataforge-output` module
|
||||||
|
|
||||||
### Fixed
|
### Fixed
|
||||||
|
|
||||||
- Global context CoroutineScope resolution
|
- Global context CoroutineScope resolution
|
||||||
- Library mode compliance
|
- Library mode compliance
|
||||||
|
|
||||||
### Security
|
|
||||||
|
201
LICENSE
Normal file
201
LICENSE
Normal file
@ -0,0 +1,201 @@
|
|||||||
|
Apache License
|
||||||
|
Version 2.0, January 2004
|
||||||
|
http://www.apache.org/licenses/
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||||
|
|
||||||
|
1. Definitions.
|
||||||
|
|
||||||
|
"License" shall mean the terms and conditions for use, reproduction,
|
||||||
|
and distribution as defined by Sections 1 through 9 of this document.
|
||||||
|
|
||||||
|
"Licensor" shall mean the copyright owner or entity authorized by
|
||||||
|
the copyright owner that is granting the License.
|
||||||
|
|
||||||
|
"Legal Entity" shall mean the union of the acting entity and all
|
||||||
|
other entities that control, are controlled by, or are under common
|
||||||
|
control with that entity. For the purposes of this definition,
|
||||||
|
"control" means (i) the power, direct or indirect, to cause the
|
||||||
|
direction or management of such entity, whether by contract or
|
||||||
|
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||||
|
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||||
|
|
||||||
|
"You" (or "Your") shall mean an individual or Legal Entity
|
||||||
|
exercising permissions granted by this License.
|
||||||
|
|
||||||
|
"Source" form shall mean the preferred form for making modifications,
|
||||||
|
including but not limited to software source code, documentation
|
||||||
|
source, and configuration files.
|
||||||
|
|
||||||
|
"Object" form shall mean any form resulting from mechanical
|
||||||
|
transformation or translation of a Source form, including but
|
||||||
|
not limited to compiled object code, generated documentation,
|
||||||
|
and conversions to other media types.
|
||||||
|
|
||||||
|
"Work" shall mean the work of authorship, whether in Source or
|
||||||
|
Object form, made available under the License, as indicated by a
|
||||||
|
copyright notice that is included in or attached to the work
|
||||||
|
(an example is provided in the Appendix below).
|
||||||
|
|
||||||
|
"Derivative Works" shall mean any work, whether in Source or Object
|
||||||
|
form, that is based on (or derived from) the Work and for which the
|
||||||
|
editorial revisions, annotations, elaborations, or other modifications
|
||||||
|
represent, as a whole, an original work of authorship. For the purposes
|
||||||
|
of this License, Derivative Works shall not include works that remain
|
||||||
|
separable from, or merely link (or bind by name) to the interfaces of,
|
||||||
|
the Work and Derivative Works thereof.
|
||||||
|
|
||||||
|
"Contribution" shall mean any work of authorship, including
|
||||||
|
the original version of the Work and any modifications or additions
|
||||||
|
to that Work or Derivative Works thereof, that is intentionally
|
||||||
|
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||||
|
or by an individual or Legal Entity authorized to submit on behalf of
|
||||||
|
the copyright owner. For the purposes of this definition, "submitted"
|
||||||
|
means any form of electronic, verbal, or written communication sent
|
||||||
|
to the Licensor or its representatives, including but not limited to
|
||||||
|
communication on electronic mailing lists, source code control systems,
|
||||||
|
and issue tracking systems that are managed by, or on behalf of, the
|
||||||
|
Licensor for the purpose of discussing and improving the Work, but
|
||||||
|
excluding communication that is conspicuously marked or otherwise
|
||||||
|
designated in writing by the copyright owner as "Not a Contribution."
|
||||||
|
|
||||||
|
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||||
|
on behalf of whom a Contribution has been received by Licensor and
|
||||||
|
subsequently incorporated within the Work.
|
||||||
|
|
||||||
|
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
copyright license to reproduce, prepare Derivative Works of,
|
||||||
|
publicly display, publicly perform, sublicense, and distribute the
|
||||||
|
Work and such Derivative Works in Source or Object form.
|
||||||
|
|
||||||
|
3. Grant of Patent License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
(except as stated in this section) patent license to make, have made,
|
||||||
|
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||||
|
where such license applies only to those patent claims licensable
|
||||||
|
by such Contributor that are necessarily infringed by their
|
||||||
|
Contribution(s) alone or by combination of their Contribution(s)
|
||||||
|
with the Work to which such Contribution(s) was submitted. If You
|
||||||
|
institute patent litigation against any entity (including a
|
||||||
|
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||||
|
or a Contribution incorporated within the Work constitutes direct
|
||||||
|
or contributory patent infringement, then any patent licenses
|
||||||
|
granted to You under this License for that Work shall terminate
|
||||||
|
as of the date such litigation is filed.
|
||||||
|
|
||||||
|
4. Redistribution. You may reproduce and distribute copies of the
|
||||||
|
Work or Derivative Works thereof in any medium, with or without
|
||||||
|
modifications, and in Source or Object form, provided that You
|
||||||
|
meet the following conditions:
|
||||||
|
|
||||||
|
(a) You must give any other recipients of the Work or
|
||||||
|
Derivative Works a copy of this License; and
|
||||||
|
|
||||||
|
(b) You must cause any modified files to carry prominent notices
|
||||||
|
stating that You changed the files; and
|
||||||
|
|
||||||
|
(c) You must retain, in the Source form of any Derivative Works
|
||||||
|
that You distribute, all copyright, patent, trademark, and
|
||||||
|
attribution notices from the Source form of the Work,
|
||||||
|
excluding those notices that do not pertain to any part of
|
||||||
|
the Derivative Works; and
|
||||||
|
|
||||||
|
(d) If the Work includes a "NOTICE" text file as part of its
|
||||||
|
distribution, then any Derivative Works that You distribute must
|
||||||
|
include a readable copy of the attribution notices contained
|
||||||
|
within such NOTICE file, excluding those notices that do not
|
||||||
|
pertain to any part of the Derivative Works, in at least one
|
||||||
|
of the following places: within a NOTICE text file distributed
|
||||||
|
as part of the Derivative Works; within the Source form or
|
||||||
|
documentation, if provided along with the Derivative Works; or,
|
||||||
|
within a display generated by the Derivative Works, if and
|
||||||
|
wherever such third-party notices normally appear. The contents
|
||||||
|
of the NOTICE file are for informational purposes only and
|
||||||
|
do not modify the License. You may add Your own attribution
|
||||||
|
notices within Derivative Works that You distribute, alongside
|
||||||
|
or as an addendum to the NOTICE text from the Work, provided
|
||||||
|
that such additional attribution notices cannot be construed
|
||||||
|
as modifying the License.
|
||||||
|
|
||||||
|
You may add Your own copyright statement to Your modifications and
|
||||||
|
may provide additional or different license terms and conditions
|
||||||
|
for use, reproduction, or distribution of Your modifications, or
|
||||||
|
for any such Derivative Works as a whole, provided Your use,
|
||||||
|
reproduction, and distribution of the Work otherwise complies with
|
||||||
|
the conditions stated in this License.
|
||||||
|
|
||||||
|
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||||
|
any Contribution intentionally submitted for inclusion in the Work
|
||||||
|
by You to the Licensor shall be under the terms and conditions of
|
||||||
|
this License, without any additional terms or conditions.
|
||||||
|
Notwithstanding the above, nothing herein shall supersede or modify
|
||||||
|
the terms of any separate license agreement you may have executed
|
||||||
|
with Licensor regarding such Contributions.
|
||||||
|
|
||||||
|
6. Trademarks. This License does not grant permission to use the trade
|
||||||
|
names, trademarks, service marks, or product names of the Licensor,
|
||||||
|
except as required for reasonable and customary use in describing the
|
||||||
|
origin of the Work and reproducing the content of the NOTICE file.
|
||||||
|
|
||||||
|
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||||
|
agreed to in writing, Licensor provides the Work (and each
|
||||||
|
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||||
|
implied, including, without limitation, any warranties or conditions
|
||||||
|
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||||
|
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||||
|
appropriateness of using or redistributing the Work and assume any
|
||||||
|
risks associated with Your exercise of permissions under this License.
|
||||||
|
|
||||||
|
8. Limitation of Liability. In no event and under no legal theory,
|
||||||
|
whether in tort (including negligence), contract, or otherwise,
|
||||||
|
unless required by applicable law (such as deliberate and grossly
|
||||||
|
negligent acts) or agreed to in writing, shall any Contributor be
|
||||||
|
liable to You for damages, including any direct, indirect, special,
|
||||||
|
incidental, or consequential damages of any character arising as a
|
||||||
|
result of this License or out of the use or inability to use the
|
||||||
|
Work (including but not limited to damages for loss of goodwill,
|
||||||
|
work stoppage, computer failure or malfunction, or any and all
|
||||||
|
other commercial damages or losses), even if such Contributor
|
||||||
|
has been advised of the possibility of such damages.
|
||||||
|
|
||||||
|
9. Accepting Warranty or Additional Liability. While redistributing
|
||||||
|
the Work or Derivative Works thereof, You may choose to offer,
|
||||||
|
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||||
|
or other liability obligations and/or rights consistent with this
|
||||||
|
License. However, in accepting such obligations, You may act only
|
||||||
|
on Your own behalf and on Your sole responsibility, not on behalf
|
||||||
|
of any other Contributor, and only if You agree to indemnify,
|
||||||
|
defend, and hold each Contributor harmless for any liability
|
||||||
|
incurred by, or claims asserted against, such Contributor by reason
|
||||||
|
of your accepting any such warranty or additional liability.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
APPENDIX: How to apply the Apache License to your work.
|
||||||
|
|
||||||
|
To apply the Apache License to your work, attach the following
|
||||||
|
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||||
|
replaced with your own identifying information. (Don't include
|
||||||
|
the brackets!) The text should be enclosed in the appropriate
|
||||||
|
comment syntax for the file format. We also recommend that a
|
||||||
|
file or class name and description of purpose be included on the
|
||||||
|
same "printed page" as the copyright notice for easier
|
||||||
|
identification within third-party archives.
|
||||||
|
|
||||||
|
Copyright [yyyy] [name of copyright owner]
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
114
README.md
114
README.md
@ -3,100 +3,36 @@
|
|||||||
|
|
||||||
![Gradle build](https://github.com/mipt-npm/dataforge-core/workflows/Gradle%20build/badge.svg)
|
![Gradle build](https://github.com/mipt-npm/dataforge-core/workflows/Gradle%20build/badge.svg)
|
||||||
|
|
||||||
[ ![Download](https://api.bintray.com/packages/mipt-npm/dataforge/dataforge-meta/images/download.svg) ](https://bintray.com/mipt-npm/dataforge/dataforge-meta/_latestVersion)
|
|
||||||
|
|
||||||
|
### [dataforge-context](dataforge-context)
|
||||||
|
> Context and provider definitions
|
||||||
|
>
|
||||||
|
> **Maturity**: DEVELOPMENT
|
||||||
|
|
||||||
|
### [dataforge-data](dataforge-data)
|
||||||
|
>
|
||||||
|
> **Maturity**: EXPERIMENTAL
|
||||||
|
|
||||||
# Questions and Answers #
|
### [dataforge-io](dataforge-io)
|
||||||
|
> IO module
|
||||||
|
>
|
||||||
|
> **Maturity**: EXPERIMENTAL
|
||||||
|
|
||||||
In this section we will try to cover DataForge main ideas in the form of questions and answers.
|
### [dataforge-meta](dataforge-meta)
|
||||||
|
> Meta definition and basic operations on meta
|
||||||
|
>
|
||||||
|
> **Maturity**: DEVELOPMENT
|
||||||
|
|
||||||
## General ##
|
### [dataforge-scripting](dataforge-scripting)
|
||||||
|
>
|
||||||
|
> **Maturity**: PROTOTYPE
|
||||||
|
|
||||||
**Q:** I have a lot of data to analyze. The analysis process is complicated, requires a lot of stages and data flow is not always obvious. To top it the data size is huge, so I don't want to perform operation I don't need (calculate something I won't need or calculate something twice). And yes, I need it to be performed in parallel and probably on remote computer. By the way, I am sick and tired of scripts that modify other scripts that control scripts. Could you help me?
|
### [dataforge-workspace](dataforge-workspace)
|
||||||
|
>
|
||||||
|
> **Maturity**: EXPERIMENTAL
|
||||||
|
|
||||||
**A:** Yes, that is the precisely the problem DataForge was made to solve. It allows to perform some automated data manipulations with automatic optimization and parallelization. The important thing that data processing recipes are made in the declarative way, so it is quite easy to perform computations on a remote station. Also DataForge guarantees reproducibility of analysis results.
|
### [dataforge-io/dataforge-io-yaml](dataforge-io/dataforge-io-yaml)
|
||||||
<hr>
|
> YAML meta converters and Front Matter envelope format
|
||||||
|
>
|
||||||
|
> **Maturity**: PROTOTYPE
|
||||||
|
|
||||||
**Q:** How does it work?
|
|
||||||
|
|
||||||
**A:** At the core of DataForge lies the idea of **metadata processor**. It utilizes the statement that in order to analyze something you need data itself and some additional information about what does that data represent and what does user want as a result. This additional information is called metadata and could be organized in a regular structure (a tree of values not unlike XML or JSON). The important thing is that this distinction leaves no place for user instructions (or scripts). Indeed, the idea of DataForge logic is that one do not need imperative commands. The framework configures itself according to input meta-data and decides what operations should be performed in the most efficient way.
|
|
||||||
<hr>
|
|
||||||
|
|
||||||
**Q:** But where does it take algorithms to use?
|
|
||||||
|
|
||||||
**A:** Of course algorithms must be written somewhere. No magic here. The logic is written in specialized modules. Some modules are provided out of the box at the system core, some need to be developed for specific problem.
|
|
||||||
<hr>
|
|
||||||
|
|
||||||
**Q:** So I still need to write the code? What is the difference then?
|
|
||||||
|
|
||||||
**A:** Yes, someone still need to write the code. But not necessary you. Simple operations could be performed using provided core logic. Also your group can have one programmer writing the logic and all other using it without any real programming expertise. Also the framework organized in a such way that one writes some additional logic, he do not need to thing about complicated thing like parallel computing, resource handling, logging, caching etc. Most of the things are done by the DataForge.
|
|
||||||
<hr>
|
|
||||||
|
|
||||||
## Platform ##
|
|
||||||
|
|
||||||
**Q:** Which platform does DataForge use? Which operation system is it working on?
|
|
||||||
|
|
||||||
**A:** The DataForge is mostly written in Java and utilizes JVM as a platform. It works on any system that supports JVM (meaning almost any modern system excluding some mobile platforms).
|
|
||||||
<hr>
|
|
||||||
|
|
||||||
**Q:** But Java... it is slow!
|
|
||||||
|
|
||||||
**A:** [It is not](https://stackoverflow.com/questions/2163411/is-java-really-slow/2163570#2163570). It lacks some hardware specific optimizations and requires some additional time to start (due to JIT nature), but otherwise it is at least as fast as other languages traditionally used in science. More importantly, the memory safety, tooling support and vast ecosystem makes it №1 candidate for data analysis framework.
|
|
||||||
|
|
||||||
<hr>
|
|
||||||
|
|
||||||
**Q:** Can I use my C++/Fortran/Python code in DataForge?
|
|
||||||
|
|
||||||
**A:** Yes, as long as the code could be called from Java. Most of common languages have a bridge for Java access. There are completely no problems with compiled C/Fortran libraries. Python code could be called via one of existing python-java interfaces. It is also planned to implement remote method invocation for common languages, so your Python, or, say, Julia, code could run in its native environment. The metadata processor paradigm makes it much easier to do so.
|
|
||||||
|
|
||||||
<hr>
|
|
||||||
|
|
||||||
## Features ##
|
|
||||||
|
|
||||||
**Q:** What other features does DataForge provide?
|
|
||||||
|
|
||||||
**A:** Alongside metadata processing (and a lot of tools for metadata manipulation and layering), DataForge has two additional important concepts:
|
|
||||||
|
|
||||||
* **Modularisation**. Contrary to lot other frameworks, DataForge is intrinsically modular. The mandatory part is a rather tiny core module. Everything else could be customized.
|
|
||||||
|
|
||||||
* **Context encapsulation**. Every DataForge task is executed in some context. The context isolates environment for the task and also works as dependency injection base and specifies interaction of the task with the external world.
|
|
||||||
|
|
||||||
|
|
||||||
<!--<hr>
|
|
||||||
|
|
||||||
**Q:** OK, but now I want to work directly with my measuring devices. How can I do that?
|
|
||||||
|
|
||||||
**A:** The [dataforge-control](${site.url}/docs.html#control) module provides interfaces to interact with the hardware. Out of the box it supports safe communication with TCP/IP or COM/tty based devices. Specific device declaration could be done via additional modules. It is also possible to maintain data storage with [datforge-storage](${site.url}/docs.htm#storage) module.-->
|
|
||||||
|
|
||||||
<hr>
|
|
||||||
|
|
||||||
<!--**Q:** Declarations and metadata are good, but I want my scripts back!
|
|
||||||
|
|
||||||
**A:** We can do that. [GRIND](${site.url}/docs.html#grind) provides a shell-like environment called GrindShell. It allows to run imperative scripts with full access to all of the DataForge functionality. Grind scripts are basically context-encapsulated. Also there are convenient feature wrappers called helpers that could be loaded into the shell when new features modules are added.-->
|
|
||||||
|
|
||||||
<hr>
|
|
||||||
|
|
||||||
## Misc ##
|
|
||||||
|
|
||||||
**Q:** So everything looks great, can I replace my ROOT / other data analysis framework with DataForge?
|
|
||||||
|
|
||||||
**A:** One must note, that DataForge is made for analysis, not for visualisation. The visualisation and user interaction capabilities of DataForge are rather limited compared to frameworks like ROOT, JAS3 or DataMelt. The idea is to provide reliable API and core functionality. In fact JAS3 and DataMelt could be used as a frontend for DataForge mechanics. It is planned to add an interface to ROOT via JFreeHep AIDA.
|
|
||||||
|
|
||||||
<hr>
|
|
||||||
|
|
||||||
**Q:** How does DataForge compare to cluster computation frameworks like Hadoop or Spark?
|
|
||||||
|
|
||||||
**A:** Again, it is not the purpose of DataForge to replace cluster software. DataForge has some internal parallelism mechanics and implementations, but they are most certainly worse then specially developed programs. Still, DataForge is not fixed on one single implementation. Your favourite parallel processing tool could be still used as a back-end for the DataForge. With full benefit of configuration tools, integrations and no performance overhead.
|
|
||||||
<!--
|
|
||||||
<hr>
|
|
||||||
|
|
||||||
**Q:** Is it possible to use DataForge in notebook mode?
|
|
||||||
|
|
||||||
**A:** Yes, it is. DataForge can be used as is from [beaker/beakerx](http://beakernotebook.com/) groovy kernel with minor additional adjustments. It is planned to provide separate DataForge kernel to `beakerx` which will automatically call a specific GRIND shell.-->
|
|
||||||
|
|
||||||
<hr>
|
|
||||||
|
|
||||||
**Q:** Can I use DataForge on a mobile platform?
|
|
||||||
|
|
||||||
**A:** DataForge is modular. Core and the most of api are pretty compact, so it could be used in Android applications. Some modules are designed for PC and could not be used on other platforms. IPhone does not support Java and therefore could use only client-side DataForge applications.
|
|
||||||
|
@ -1,24 +1,39 @@
|
|||||||
|
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
|
||||||
|
import space.kscience.gradle.useApache2Licence
|
||||||
|
import space.kscience.gradle.useSPCTeam
|
||||||
|
|
||||||
plugins {
|
plugins {
|
||||||
id("ru.mipt.npm.project")
|
id("space.kscience.gradle.project")
|
||||||
}
|
}
|
||||||
|
|
||||||
val dataforgeVersion by extra("0.2.0")
|
|
||||||
|
|
||||||
val bintrayRepo by extra("dataforge")
|
|
||||||
val githubProject by extra("dataforge-core")
|
|
||||||
val spaceRepo by extra("https://maven.jetbrains.space/mipt-npm/p/df/maven")
|
|
||||||
|
|
||||||
allprojects {
|
allprojects {
|
||||||
group = "hep.dataforge"
|
group = "space.kscience"
|
||||||
version = dataforgeVersion
|
version = "0.7.0"
|
||||||
|
|
||||||
apply<org.jetbrains.dokka.gradle.DokkaPlugin>()
|
|
||||||
|
|
||||||
repositories {
|
|
||||||
mavenLocal()
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
subprojects {
|
subprojects {
|
||||||
apply(plugin = "ru.mipt.npm.publish")
|
apply(plugin = "maven-publish")
|
||||||
|
|
||||||
|
tasks.withType<KotlinCompile> {
|
||||||
|
kotlinOptions {
|
||||||
|
freeCompilerArgs = freeCompilerArgs + "-Xcontext-receivers"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
readme {
|
||||||
|
readmeTemplate = file("docs/templates/README-TEMPLATE.md")
|
||||||
|
}
|
||||||
|
|
||||||
|
ksciencePublish {
|
||||||
|
pom("https://github.com/SciProgCentre/kmath") {
|
||||||
|
useApache2Licence()
|
||||||
|
useSPCTeam()
|
||||||
|
}
|
||||||
|
repository("spc","https://maven.sciprog.center/kscience")
|
||||||
|
sonatype()
|
||||||
|
}
|
||||||
|
|
||||||
|
apiValidation {
|
||||||
|
nonPublicMarkers.add("space.kscience.dataforge.misc.DFExperimental")
|
||||||
}
|
}
|
23
dataforge-context/README.md
Normal file
23
dataforge-context/README.md
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
# Module dataforge-context
|
||||||
|
|
||||||
|
Context and provider definitions
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
## Artifact:
|
||||||
|
|
||||||
|
The Maven coordinates of this project are `space.kscience:dataforge-context:0.7.0`.
|
||||||
|
|
||||||
|
**Gradle Kotlin DSL:**
|
||||||
|
```kotlin
|
||||||
|
repositories {
|
||||||
|
maven("https://repo.kotlin.link")
|
||||||
|
//uncomment to access development builds
|
||||||
|
//maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
|
||||||
|
mavenCentral()
|
||||||
|
}
|
||||||
|
|
||||||
|
dependencies {
|
||||||
|
implementation("space.kscience:dataforge-context:0.7.0")
|
||||||
|
}
|
||||||
|
```
|
@ -1,276 +1,271 @@
|
|||||||
public abstract class hep/dataforge/context/AbstractPlugin : hep/dataforge/context/Plugin {
|
public abstract class space/kscience/dataforge/context/AbstractPlugin : space/kscience/dataforge/context/Plugin {
|
||||||
public fun <init> ()V
|
public fun <init> ()V
|
||||||
public fun <init> (Lhep/dataforge/meta/Meta;)V
|
public fun <init> (Lspace/kscience/dataforge/meta/Meta;)V
|
||||||
public synthetic fun <init> (Lhep/dataforge/meta/Meta;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
|
public synthetic fun <init> (Lspace/kscience/dataforge/meta/Meta;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
|
||||||
public fun attach (Lhep/dataforge/context/Context;)V
|
public fun attach (Lspace/kscience/dataforge/context/Context;)V
|
||||||
public fun content (Ljava/lang/String;)Ljava/util/Map;
|
public fun dependsOn ()Ljava/util/Map;
|
||||||
public synthetic fun dependsOn ()Ljava/util/Collection;
|
|
||||||
public final fun dependsOn ()Ljava/util/List;
|
|
||||||
public fun detach ()V
|
public fun detach ()V
|
||||||
public fun getContext ()Lhep/dataforge/context/Context;
|
public fun getContext ()Lspace/kscience/dataforge/context/Context;
|
||||||
public fun getDefaultChainTarget ()Ljava/lang/String;
|
public fun getMeta ()Lspace/kscience/dataforge/meta/Meta;
|
||||||
public fun getDefaultTarget ()Ljava/lang/String;
|
public fun isAttached ()Z
|
||||||
public fun getLogger ()Lmu/KLogger;
|
protected final fun require (Lspace/kscience/dataforge/context/PluginFactory;Lkotlin/reflect/KClass;Lspace/kscience/dataforge/meta/Meta;)Lkotlin/properties/ReadOnlyProperty;
|
||||||
public fun getMeta ()Lhep/dataforge/meta/Meta;
|
public static synthetic fun require$default (Lspace/kscience/dataforge/context/AbstractPlugin;Lspace/kscience/dataforge/context/PluginFactory;Lkotlin/reflect/KClass;Lspace/kscience/dataforge/meta/Meta;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
|
||||||
public fun getName ()Lhep/dataforge/names/Name;
|
|
||||||
protected final fun require (Lhep/dataforge/context/PluginFactory;)Lkotlin/properties/ReadOnlyProperty;
|
|
||||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/context/AbstractPluginKt {
|
public final class space/kscience/dataforge/context/AbstractPluginKt {
|
||||||
public static final fun toMap (Ljava/util/Collection;)Ljava/util/Map;
|
public static final fun associateByName (Ljava/util/Collection;)Ljava/util/Map;
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/context/ClassLoaderPlugin : hep/dataforge/context/AbstractPlugin {
|
public final class space/kscience/dataforge/context/ClassLoaderPlugin : space/kscience/dataforge/context/AbstractPlugin {
|
||||||
public static final field Companion Lhep/dataforge/context/ClassLoaderPlugin$Companion;
|
public static final field Companion Lspace/kscience/dataforge/context/ClassLoaderPlugin$Companion;
|
||||||
public fun <init> (Ljava/lang/ClassLoader;)V
|
public fun <init> (Ljava/lang/ClassLoader;)V
|
||||||
public fun getTag ()Lhep/dataforge/context/PluginTag;
|
public fun getTag ()Lspace/kscience/dataforge/context/PluginTag;
|
||||||
public final fun services (Lkotlin/reflect/KClass;)Lkotlin/sequences/Sequence;
|
public final fun services (Lkotlin/reflect/KClass;)Lkotlin/sequences/Sequence;
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/context/ClassLoaderPlugin$Companion {
|
public final class space/kscience/dataforge/context/ClassLoaderPlugin$Companion {
|
||||||
public final fun getDEFAULT ()Lhep/dataforge/context/ClassLoaderPlugin;
|
public final fun getDEFAULT ()Lspace/kscience/dataforge/context/ClassLoaderPlugin;
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/context/ClassLoaderPluginKt {
|
public final class space/kscience/dataforge/context/ClassLoaderPluginKt {
|
||||||
public static final fun getClassLoaderPlugin (Lhep/dataforge/context/Context;)Lhep/dataforge/context/ClassLoaderPlugin;
|
public static final fun getClassLoaderPlugin (Lspace/kscience/dataforge/context/Context;)Lspace/kscience/dataforge/context/ClassLoaderPlugin;
|
||||||
}
|
}
|
||||||
|
|
||||||
public class hep/dataforge/context/Context : hep/dataforge/context/Named, hep/dataforge/meta/MetaRepr, hep/dataforge/provider/Provider, kotlinx/coroutines/CoroutineScope {
|
public class space/kscience/dataforge/context/Context : kotlinx/coroutines/CoroutineScope, space/kscience/dataforge/meta/MetaRepr, space/kscience/dataforge/misc/Named, space/kscience/dataforge/provider/Provider {
|
||||||
public static final field Companion Lhep/dataforge/context/Context$Companion;
|
public static final field Companion Lspace/kscience/dataforge/context/Context$Companion;
|
||||||
public static final field PROPERTY_TARGET Ljava/lang/String;
|
public static final field PROPERTY_TARGET Ljava/lang/String;
|
||||||
public fun <init> (Lhep/dataforge/names/Name;Lhep/dataforge/context/Context;Lhep/dataforge/meta/Meta;Ljava/util/Set;)V
|
public final fun buildContext (Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/context/Context;
|
||||||
public synthetic fun <init> (Lhep/dataforge/names/Name;Lhep/dataforge/context/Context;Lhep/dataforge/meta/Meta;Ljava/util/Set;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
|
public static synthetic fun buildContext$default (Lspace/kscience/dataforge/context/Context;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/context/Context;
|
||||||
public fun close ()V
|
public fun close ()V
|
||||||
public fun content (Ljava/lang/String;)Ljava/util/Map;
|
public fun content (Ljava/lang/String;)Ljava/util/Map;
|
||||||
public final fun content (Ljava/lang/String;Z)Ljava/util/Map;
|
public final fun content (Ljava/lang/String;Z)Ljava/util/Map;
|
||||||
public fun getCoroutineContext ()Lkotlin/coroutines/CoroutineContext;
|
public fun getCoroutineContext ()Lkotlin/coroutines/CoroutineContext;
|
||||||
public fun getDefaultChainTarget ()Ljava/lang/String;
|
|
||||||
public fun getDefaultTarget ()Ljava/lang/String;
|
public fun getDefaultTarget ()Ljava/lang/String;
|
||||||
public final fun getLogger ()Lmu/KLogger;
|
public final fun getName ()Lspace/kscience/dataforge/names/Name;
|
||||||
public final fun getName ()Lhep/dataforge/names/Name;
|
public final fun getParent ()Lspace/kscience/dataforge/context/Context;
|
||||||
public final fun getParent ()Lhep/dataforge/context/Context;
|
public final fun getPlugins ()Lspace/kscience/dataforge/context/PluginManager;
|
||||||
public final fun getPlugins ()Lhep/dataforge/context/PluginManager;
|
public final fun getProperties ()Lspace/kscience/dataforge/meta/Laminate;
|
||||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
public fun toMeta ()Lspace/kscience/dataforge/meta/Meta;
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/context/Context$Companion {
|
public final class space/kscience/dataforge/context/Context$Companion {
|
||||||
}
|
}
|
||||||
|
|
||||||
public abstract interface class hep/dataforge/context/ContextAware {
|
public abstract interface class space/kscience/dataforge/context/ContextAware {
|
||||||
public abstract fun getContext ()Lhep/dataforge/context/Context;
|
public abstract fun getContext ()Lspace/kscience/dataforge/context/Context;
|
||||||
public abstract fun getLogger ()Lmu/KLogger;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/context/ContextAware$DefaultImpls {
|
public final class space/kscience/dataforge/context/ContextBuilder {
|
||||||
public static fun getLogger (Lhep/dataforge/context/ContextAware;)Lmu/KLogger;
|
public final fun build ()Lspace/kscience/dataforge/context/Context;
|
||||||
}
|
public final fun getName ()Lspace/kscience/dataforge/names/Name;
|
||||||
|
|
||||||
public final class hep/dataforge/context/ContextBuilder {
|
|
||||||
public fun <init> ()V
|
|
||||||
public fun <init> (Lhep/dataforge/context/Context;Ljava/lang/String;)V
|
|
||||||
public synthetic fun <init> (Lhep/dataforge/context/Context;Ljava/lang/String;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
|
|
||||||
public final fun build ()Lhep/dataforge/context/Context;
|
|
||||||
public final fun getName ()Ljava/lang/String;
|
|
||||||
public final fun plugin (Lhep/dataforge/context/Plugin;)V
|
|
||||||
public final fun plugin (Lhep/dataforge/context/PluginFactory;Lkotlin/jvm/functions/Function1;)V
|
|
||||||
public final fun plugin (Lhep/dataforge/context/PluginTag;Lkotlin/jvm/functions/Function1;)V
|
|
||||||
public final fun plugin (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Lkotlin/jvm/functions/Function1;)V
|
public final fun plugin (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Lkotlin/jvm/functions/Function1;)V
|
||||||
public static synthetic fun plugin$default (Lhep/dataforge/context/ContextBuilder;Lhep/dataforge/context/PluginFactory;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
|
public final fun plugin (Lspace/kscience/dataforge/context/Plugin;)V
|
||||||
public static synthetic fun plugin$default (Lhep/dataforge/context/ContextBuilder;Lhep/dataforge/context/PluginTag;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
|
public final fun plugin (Lspace/kscience/dataforge/context/PluginFactory;Lkotlin/jvm/functions/Function1;)V
|
||||||
public static synthetic fun plugin$default (Lhep/dataforge/context/ContextBuilder;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
|
public final fun plugin (Lspace/kscience/dataforge/context/PluginFactory;Lspace/kscience/dataforge/meta/Meta;)V
|
||||||
|
public final fun plugin (Lspace/kscience/dataforge/context/PluginTag;Lkotlin/jvm/functions/Function1;)V
|
||||||
|
public static synthetic fun plugin$default (Lspace/kscience/dataforge/context/ContextBuilder;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
|
||||||
|
public static synthetic fun plugin$default (Lspace/kscience/dataforge/context/ContextBuilder;Lspace/kscience/dataforge/context/PluginFactory;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
|
||||||
|
public static synthetic fun plugin$default (Lspace/kscience/dataforge/context/ContextBuilder;Lspace/kscience/dataforge/context/PluginTag;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
|
||||||
public final fun properties (Lkotlin/jvm/functions/Function1;)V
|
public final fun properties (Lkotlin/jvm/functions/Function1;)V
|
||||||
public final fun setName (Ljava/lang/String;)V
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public abstract interface class hep/dataforge/context/Factory {
|
public final class space/kscience/dataforge/context/ContextBuilderKt {
|
||||||
public abstract fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Ljava/lang/Object;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/context/Factory$DefaultImpls {
|
public final class space/kscience/dataforge/context/DefaultLogManager : space/kscience/dataforge/context/AbstractPlugin, space/kscience/dataforge/context/LogManager {
|
||||||
public static synthetic fun invoke$default (Lhep/dataforge/context/Factory;Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;ILjava/lang/Object;)Ljava/lang/Object;
|
public static final field Companion Lspace/kscience/dataforge/context/DefaultLogManager$Companion;
|
||||||
|
public fun <init> ()V
|
||||||
|
public fun getDefaultLogger ()Lspace/kscience/dataforge/context/Logger;
|
||||||
|
public fun getTag ()Lspace/kscience/dataforge/context/PluginTag;
|
||||||
|
public fun logger (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/context/Logger;
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/context/Global : hep/dataforge/context/Context {
|
public final class space/kscience/dataforge/context/DefaultLogManager$Companion : space/kscience/dataforge/context/PluginFactory {
|
||||||
public static final field INSTANCE Lhep/dataforge/context/Global;
|
public synthetic fun build (Lspace/kscience/dataforge/context/Context;Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
|
||||||
public fun close ()V
|
public fun build (Lspace/kscience/dataforge/context/Context;Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/context/DefaultLogManager;
|
||||||
public final fun context (Ljava/lang/String;Lhep/dataforge/context/Context;Lkotlin/jvm/functions/Function1;)Lhep/dataforge/context/Context;
|
public fun getTag ()Lspace/kscience/dataforge/context/PluginTag;
|
||||||
public static synthetic fun context$default (Lhep/dataforge/context/Global;Ljava/lang/String;Lhep/dataforge/context/Context;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lhep/dataforge/context/Context;
|
|
||||||
public final fun getContext (Ljava/lang/String;)Lhep/dataforge/context/Context;
|
|
||||||
public fun getCoroutineContext ()Lkotlin/coroutines/CoroutineContext;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public abstract interface class hep/dataforge/context/Named {
|
public abstract interface class space/kscience/dataforge/context/Factory {
|
||||||
public static final field Companion Lhep/dataforge/context/Named$Companion;
|
public abstract fun build (Lspace/kscience/dataforge/context/Context;Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
|
||||||
public abstract fun getName ()Lhep/dataforge/names/Name;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/context/Named$Companion {
|
public final class space/kscience/dataforge/context/FactoryKt {
|
||||||
public final fun nameOf (Ljava/lang/Object;)Lhep/dataforge/names/Name;
|
public static final fun invoke (Lspace/kscience/dataforge/context/Factory;Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/context/Context;)Ljava/lang/Object;
|
||||||
|
public static synthetic fun invoke$default (Lspace/kscience/dataforge/context/Factory;Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/context/Context;ILjava/lang/Object;)Ljava/lang/Object;
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/context/NamedKt {
|
public final class space/kscience/dataforge/context/GlobalKt {
|
||||||
public static final fun isAnonymous (Lhep/dataforge/context/Named;)Z
|
public static final fun Context (Ljava/lang/String;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/context/Context;
|
||||||
|
public static synthetic fun Context$default (Ljava/lang/String;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/context/Context;
|
||||||
|
public static final fun getGlobal ()Lspace/kscience/dataforge/context/Context;
|
||||||
}
|
}
|
||||||
|
|
||||||
public abstract interface class hep/dataforge/context/Plugin : hep/dataforge/context/ContextAware, hep/dataforge/context/Named, hep/dataforge/meta/MetaRepr, hep/dataforge/provider/Provider {
|
public abstract interface class space/kscience/dataforge/context/LogManager : space/kscience/dataforge/context/Logger, space/kscience/dataforge/context/Plugin {
|
||||||
public static final field Companion Lhep/dataforge/context/Plugin$Companion;
|
public static final field Companion Lspace/kscience/dataforge/context/LogManager$Companion;
|
||||||
|
public static final field DEBUG Ljava/lang/String;
|
||||||
|
public static final field ERROR Ljava/lang/String;
|
||||||
|
public static final field INFO Ljava/lang/String;
|
||||||
|
public static final field TRACE Ljava/lang/String;
|
||||||
|
public static final field WARNING Ljava/lang/String;
|
||||||
|
public abstract fun getDefaultLogger ()Lspace/kscience/dataforge/context/Logger;
|
||||||
|
public fun log (Ljava/lang/String;Lkotlin/jvm/functions/Function0;)V
|
||||||
|
public fun log (Lspace/kscience/dataforge/names/Name;Ljava/lang/String;Lkotlin/jvm/functions/Function0;)V
|
||||||
|
public abstract fun logger (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/context/Logger;
|
||||||
|
}
|
||||||
|
|
||||||
|
public final class space/kscience/dataforge/context/LogManager$Companion {
|
||||||
|
public static final field DEBUG Ljava/lang/String;
|
||||||
|
public static final field ERROR Ljava/lang/String;
|
||||||
|
public static final field INFO Ljava/lang/String;
|
||||||
|
public static final field TRACE Ljava/lang/String;
|
||||||
|
public static final field WARNING Ljava/lang/String;
|
||||||
|
}
|
||||||
|
|
||||||
|
public final class space/kscience/dataforge/context/LogManagerKt {
|
||||||
|
public static final fun debug (Lspace/kscience/dataforge/context/Logger;Lkotlin/jvm/functions/Function0;)V
|
||||||
|
public static final fun error (Lspace/kscience/dataforge/context/Logger;Ljava/lang/Throwable;Lkotlin/jvm/functions/Function0;)V
|
||||||
|
public static final fun error (Lspace/kscience/dataforge/context/Logger;Lkotlin/jvm/functions/Function0;)V
|
||||||
|
public static final fun getLogger (Lspace/kscience/dataforge/context/Context;)Lspace/kscience/dataforge/context/LogManager;
|
||||||
|
public static final fun getLogger (Lspace/kscience/dataforge/context/ContextAware;)Lspace/kscience/dataforge/context/Logger;
|
||||||
|
public static final fun info (Lspace/kscience/dataforge/context/Logger;Lkotlin/jvm/functions/Function0;)V
|
||||||
|
public static final fun trace (Lspace/kscience/dataforge/context/Logger;Lkotlin/jvm/functions/Function0;)V
|
||||||
|
public static final fun warn (Lspace/kscience/dataforge/context/Logger;Lkotlin/jvm/functions/Function0;)V
|
||||||
|
}
|
||||||
|
|
||||||
|
public abstract interface class space/kscience/dataforge/context/Logger {
|
||||||
|
public abstract fun log (Ljava/lang/String;Lkotlin/jvm/functions/Function0;)V
|
||||||
|
}
|
||||||
|
|
||||||
|
public abstract interface class space/kscience/dataforge/context/Plugin : space/kscience/dataforge/context/ContextAware, space/kscience/dataforge/meta/MetaRepr, space/kscience/dataforge/misc/Named, space/kscience/dataforge/provider/Provider {
|
||||||
|
public static final field Companion Lspace/kscience/dataforge/context/Plugin$Companion;
|
||||||
public static final field TARGET Ljava/lang/String;
|
public static final field TARGET Ljava/lang/String;
|
||||||
public abstract fun attach (Lhep/dataforge/context/Context;)V
|
public abstract fun attach (Lspace/kscience/dataforge/context/Context;)V
|
||||||
public abstract fun dependsOn ()Ljava/util/Collection;
|
public abstract fun dependsOn ()Ljava/util/Map;
|
||||||
public abstract fun detach ()V
|
public abstract fun detach ()V
|
||||||
public abstract fun getMeta ()Lhep/dataforge/meta/Meta;
|
public abstract fun getMeta ()Lspace/kscience/dataforge/meta/Meta;
|
||||||
public abstract fun getName ()Lhep/dataforge/names/Name;
|
public fun getName ()Lspace/kscience/dataforge/names/Name;
|
||||||
public abstract fun getTag ()Lhep/dataforge/context/PluginTag;
|
public abstract fun getTag ()Lspace/kscience/dataforge/context/PluginTag;
|
||||||
public abstract fun toMeta ()Lhep/dataforge/meta/Meta;
|
public abstract fun isAttached ()Z
|
||||||
|
public fun toMeta ()Lspace/kscience/dataforge/meta/Meta;
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/context/Plugin$Companion {
|
public final class space/kscience/dataforge/context/Plugin$Companion {
|
||||||
public static final field TARGET Ljava/lang/String;
|
public static final field TARGET Ljava/lang/String;
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/context/Plugin$DefaultImpls {
|
public final class space/kscience/dataforge/context/PluginBuilder {
|
||||||
public static fun content (Lhep/dataforge/context/Plugin;Ljava/lang/String;)Ljava/util/Map;
|
public fun <init> (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V
|
||||||
public static fun getDefaultChainTarget (Lhep/dataforge/context/Plugin;)Ljava/lang/String;
|
public synthetic fun <init> (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
|
||||||
public static fun getDefaultTarget (Lhep/dataforge/context/Plugin;)Ljava/lang/String;
|
public final fun build ()Lspace/kscience/dataforge/context/PluginFactory;
|
||||||
public static fun getLogger (Lhep/dataforge/context/Plugin;)Lmu/KLogger;
|
public final fun getTag ()Lspace/kscience/dataforge/context/PluginTag;
|
||||||
public static fun getName (Lhep/dataforge/context/Plugin;)Lhep/dataforge/names/Name;
|
public final fun provides (Ljava/lang/String;Ljava/util/Map;)V
|
||||||
public static fun toMeta (Lhep/dataforge/context/Plugin;)Lhep/dataforge/meta/Meta;
|
public final fun provides (Ljava/lang/String;[Lspace/kscience/dataforge/misc/Named;)V
|
||||||
|
public final fun requires (Lspace/kscience/dataforge/context/PluginFactory;Lspace/kscience/dataforge/meta/Meta;)V
|
||||||
|
public static synthetic fun requires$default (Lspace/kscience/dataforge/context/PluginBuilder;Lspace/kscience/dataforge/context/PluginFactory;Lspace/kscience/dataforge/meta/Meta;ILjava/lang/Object;)V
|
||||||
}
|
}
|
||||||
|
|
||||||
public abstract interface class hep/dataforge/context/PluginFactory : hep/dataforge/context/Factory {
|
public final class space/kscience/dataforge/context/PluginBuilderKt {
|
||||||
public static final field Companion Lhep/dataforge/context/PluginFactory$Companion;
|
public static final fun PluginFactory (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/context/PluginFactory;
|
||||||
|
public static synthetic fun PluginFactory$default (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/context/PluginFactory;
|
||||||
|
}
|
||||||
|
|
||||||
|
public abstract interface class space/kscience/dataforge/context/PluginFactory : space/kscience/dataforge/context/Factory {
|
||||||
|
public static final field Companion Lspace/kscience/dataforge/context/PluginFactory$Companion;
|
||||||
public static final field TYPE Ljava/lang/String;
|
public static final field TYPE Ljava/lang/String;
|
||||||
public abstract fun getTag ()Lhep/dataforge/context/PluginTag;
|
public abstract fun getTag ()Lspace/kscience/dataforge/context/PluginTag;
|
||||||
public abstract fun getType ()Lkotlin/reflect/KClass;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/context/PluginFactory$Companion {
|
public final class space/kscience/dataforge/context/PluginFactory$Companion {
|
||||||
public static final field TYPE Ljava/lang/String;
|
public static final field TYPE Ljava/lang/String;
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/context/PluginManager : hep/dataforge/context/ContextAware, java/lang/Iterable, kotlin/jvm/internal/markers/KMappedMarker {
|
public final class space/kscience/dataforge/context/PluginManager : java/lang/Iterable, kotlin/jvm/internal/markers/KMappedMarker, space/kscience/dataforge/context/ContextAware {
|
||||||
public fun <init> (Lhep/dataforge/context/Context;Ljava/util/Set;)V
|
public final fun find (ZLkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/context/Plugin;
|
||||||
public final fun fetch (Lhep/dataforge/context/PluginFactory;ZLhep/dataforge/meta/Meta;)Lhep/dataforge/context/Plugin;
|
public static synthetic fun find$default (Lspace/kscience/dataforge/context/PluginManager;ZLkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/context/Plugin;
|
||||||
public final fun fetch (Lhep/dataforge/context/PluginFactory;ZLkotlin/jvm/functions/Function1;)Lhep/dataforge/context/Plugin;
|
public final fun get (Lspace/kscience/dataforge/context/PluginTag;Z)Lspace/kscience/dataforge/context/Plugin;
|
||||||
public static synthetic fun fetch$default (Lhep/dataforge/context/PluginManager;Lhep/dataforge/context/PluginFactory;ZLhep/dataforge/meta/Meta;ILjava/lang/Object;)Lhep/dataforge/context/Plugin;
|
public static synthetic fun get$default (Lspace/kscience/dataforge/context/PluginManager;Lspace/kscience/dataforge/context/PluginTag;ZILjava/lang/Object;)Lspace/kscience/dataforge/context/Plugin;
|
||||||
public static synthetic fun fetch$default (Lhep/dataforge/context/PluginManager;Lhep/dataforge/context/PluginFactory;ZLkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lhep/dataforge/context/Plugin;
|
public final fun getByType (Lkotlin/reflect/KClass;Lspace/kscience/dataforge/context/PluginTag;Z)Ljava/lang/Object;
|
||||||
public final fun find (ZLkotlin/jvm/functions/Function1;)Lhep/dataforge/context/Plugin;
|
public static synthetic fun getByType$default (Lspace/kscience/dataforge/context/PluginManager;Lkotlin/reflect/KClass;Lspace/kscience/dataforge/context/PluginTag;ZILjava/lang/Object;)Ljava/lang/Object;
|
||||||
public static synthetic fun find$default (Lhep/dataforge/context/PluginManager;ZLkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lhep/dataforge/context/Plugin;
|
public fun getContext ()Lspace/kscience/dataforge/context/Context;
|
||||||
public final fun get (Lhep/dataforge/context/PluginTag;Z)Lhep/dataforge/context/Plugin;
|
|
||||||
public final fun get (Lkotlin/reflect/KClass;Lhep/dataforge/context/PluginTag;Z)Ljava/lang/Object;
|
|
||||||
public static synthetic fun get$default (Lhep/dataforge/context/PluginManager;Lhep/dataforge/context/PluginTag;ZILjava/lang/Object;)Lhep/dataforge/context/Plugin;
|
|
||||||
public static synthetic fun get$default (Lhep/dataforge/context/PluginManager;Lkotlin/reflect/KClass;Lhep/dataforge/context/PluginTag;ZILjava/lang/Object;)Ljava/lang/Object;
|
|
||||||
public fun getContext ()Lhep/dataforge/context/Context;
|
|
||||||
public fun getLogger ()Lmu/KLogger;
|
|
||||||
public fun iterator ()Ljava/util/Iterator;
|
public fun iterator ()Ljava/util/Iterator;
|
||||||
public final fun list (Z)Ljava/util/Collection;
|
public final fun list (Z)Ljava/util/Collection;
|
||||||
public final fun load (Lhep/dataforge/context/Plugin;)Lhep/dataforge/context/Plugin;
|
|
||||||
public final fun load (Lhep/dataforge/context/PluginFactory;Lhep/dataforge/meta/Meta;)Lhep/dataforge/context/Plugin;
|
|
||||||
public final fun load (Lhep/dataforge/context/PluginFactory;Lkotlin/jvm/functions/Function1;)Lhep/dataforge/context/Plugin;
|
|
||||||
public static synthetic fun load$default (Lhep/dataforge/context/PluginManager;Lhep/dataforge/context/PluginFactory;Lhep/dataforge/meta/Meta;ILjava/lang/Object;)Lhep/dataforge/context/Plugin;
|
|
||||||
public final fun remove (Lhep/dataforge/context/Plugin;)V
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/context/PluginTag : hep/dataforge/meta/MetaRepr {
|
public final class space/kscience/dataforge/context/PluginTag : space/kscience/dataforge/meta/MetaRepr {
|
||||||
public static final field Companion Lhep/dataforge/context/PluginTag$Companion;
|
public static final field Companion Lspace/kscience/dataforge/context/PluginTag$Companion;
|
||||||
public static final field DATAFORGE_GROUP Ljava/lang/String;
|
public static final field DATAFORGE_GROUP Ljava/lang/String;
|
||||||
public fun <init> (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V
|
public fun <init> (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V
|
||||||
public synthetic fun <init> (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
|
public synthetic fun <init> (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
|
||||||
public final fun component1 ()Ljava/lang/String;
|
public final fun component1 ()Ljava/lang/String;
|
||||||
public final fun component2 ()Ljava/lang/String;
|
public final fun component2 ()Ljava/lang/String;
|
||||||
public final fun component3 ()Ljava/lang/String;
|
public final fun component3 ()Ljava/lang/String;
|
||||||
public final fun copy (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)Lhep/dataforge/context/PluginTag;
|
public final fun copy (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)Lspace/kscience/dataforge/context/PluginTag;
|
||||||
public static synthetic fun copy$default (Lhep/dataforge/context/PluginTag;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/Object;)Lhep/dataforge/context/PluginTag;
|
public static synthetic fun copy$default (Lspace/kscience/dataforge/context/PluginTag;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILjava/lang/Object;)Lspace/kscience/dataforge/context/PluginTag;
|
||||||
public fun equals (Ljava/lang/Object;)Z
|
public fun equals (Ljava/lang/Object;)Z
|
||||||
public final fun getGroup ()Ljava/lang/String;
|
public final fun getGroup ()Ljava/lang/String;
|
||||||
public final fun getName ()Ljava/lang/String;
|
public final fun getName ()Ljava/lang/String;
|
||||||
public final fun getVersion ()Ljava/lang/String;
|
public final fun getVersion ()Ljava/lang/String;
|
||||||
public fun hashCode ()I
|
public fun hashCode ()I
|
||||||
public final fun matches (Lhep/dataforge/context/PluginTag;)Z
|
public final fun matches (Lspace/kscience/dataforge/context/PluginTag;)Z
|
||||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
public fun toMeta ()Lspace/kscience/dataforge/meta/Meta;
|
||||||
public fun toString ()Ljava/lang/String;
|
public fun toString ()Ljava/lang/String;
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/context/PluginTag$Companion {
|
public final class space/kscience/dataforge/context/PluginTag$$serializer : kotlinx/serialization/internal/GeneratedSerializer {
|
||||||
public final fun fromString (Ljava/lang/String;)Lhep/dataforge/context/PluginTag;
|
public static final field INSTANCE Lspace/kscience/dataforge/context/PluginTag$$serializer;
|
||||||
|
public fun childSerializers ()[Lkotlinx/serialization/KSerializer;
|
||||||
|
public synthetic fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Ljava/lang/Object;
|
||||||
|
public fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Lspace/kscience/dataforge/context/PluginTag;
|
||||||
|
public fun getDescriptor ()Lkotlinx/serialization/descriptors/SerialDescriptor;
|
||||||
|
public synthetic fun serialize (Lkotlinx/serialization/encoding/Encoder;Ljava/lang/Object;)V
|
||||||
|
public fun serialize (Lkotlinx/serialization/encoding/Encoder;Lspace/kscience/dataforge/context/PluginTag;)V
|
||||||
|
public fun typeParametersSerializers ()[Lkotlinx/serialization/KSerializer;
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/context/ResolveKt {
|
public final class space/kscience/dataforge/context/PluginTag$Companion {
|
||||||
public static final fun gather (Lhep/dataforge/context/Context;Ljava/lang/String;Lkotlin/reflect/KClass;Z)Ljava/util/Map;
|
public final fun fromString (Ljava/lang/String;)Lspace/kscience/dataforge/context/PluginTag;
|
||||||
public static synthetic fun gather$default (Lhep/dataforge/context/Context;Ljava/lang/String;Lkotlin/reflect/KClass;ZILjava/lang/Object;)Ljava/util/Map;
|
public final fun serializer ()Lkotlinx/serialization/KSerializer;
|
||||||
public static final fun gatherInSequence (Lhep/dataforge/context/Context;Ljava/lang/String;Lkotlin/reflect/KClass;Z)Lkotlin/sequences/Sequence;
|
}
|
||||||
public static synthetic fun gatherInSequence$default (Lhep/dataforge/context/Context;Ljava/lang/String;Lkotlin/reflect/KClass;ZILjava/lang/Object;)Lkotlin/sequences/Sequence;
|
|
||||||
|
public final class space/kscience/dataforge/context/ResolveKt {
|
||||||
|
public static final fun gather (Lspace/kscience/dataforge/context/Context;Ljava/lang/String;Lkotlin/reflect/KClass;Z)Ljava/util/Map;
|
||||||
|
public static synthetic fun gather$default (Lspace/kscience/dataforge/context/Context;Ljava/lang/String;Lkotlin/reflect/KClass;ZILjava/lang/Object;)Ljava/util/Map;
|
||||||
public static final fun getValues (Lkotlin/sequences/Sequence;)Lkotlin/sequences/Sequence;
|
public static final fun getValues (Lkotlin/sequences/Sequence;)Lkotlin/sequences/Sequence;
|
||||||
public static final fun resolve (Lhep/dataforge/context/Context;Ljava/lang/String;Lhep/dataforge/names/Name;Lkotlin/reflect/KClass;)Ljava/lang/Object;
|
public static final fun resolve (Lspace/kscience/dataforge/context/Context;Ljava/lang/String;Lspace/kscience/dataforge/names/Name;Lkotlin/reflect/KClass;)Ljava/lang/Object;
|
||||||
}
|
}
|
||||||
|
|
||||||
public abstract interface annotation class hep/dataforge/descriptors/Attribute : java/lang/annotation/Annotation {
|
public final class space/kscience/dataforge/context/SlfLogManager : space/kscience/dataforge/context/AbstractPlugin, space/kscience/dataforge/context/LogManager {
|
||||||
public abstract fun key ()Ljava/lang/String;
|
public static final field Companion Lspace/kscience/dataforge/context/SlfLogManager$Companion;
|
||||||
public abstract fun value ()Ljava/lang/String;
|
public fun <init> ()V
|
||||||
|
public fun getDefaultLogger ()Lspace/kscience/dataforge/context/Logger;
|
||||||
|
public fun getTag ()Lspace/kscience/dataforge/context/PluginTag;
|
||||||
|
public fun logger (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/context/Logger;
|
||||||
}
|
}
|
||||||
|
|
||||||
public abstract interface annotation class hep/dataforge/descriptors/Attributes : java/lang/annotation/Annotation {
|
public final class space/kscience/dataforge/context/SlfLogManager$Companion : space/kscience/dataforge/context/PluginFactory {
|
||||||
public abstract fun attrs ()[Lhep/dataforge/descriptors/Attribute;
|
public synthetic fun build (Lspace/kscience/dataforge/context/Context;Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
|
||||||
|
public fun build (Lspace/kscience/dataforge/context/Context;Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/context/SlfLogManager;
|
||||||
|
public fun getTag ()Lspace/kscience/dataforge/context/PluginTag;
|
||||||
}
|
}
|
||||||
|
|
||||||
public abstract interface annotation class hep/dataforge/descriptors/ItemDef : java/lang/annotation/Annotation {
|
public final class space/kscience/dataforge/properties/PropertyKt {
|
||||||
public abstract fun info ()Ljava/lang/String;
|
|
||||||
public abstract fun multiple ()Z
|
|
||||||
public abstract fun required ()Z
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public abstract interface annotation class hep/dataforge/descriptors/ValueDef : java/lang/annotation/Annotation {
|
public final class space/kscience/dataforge/properties/SchemePropertyKt {
|
||||||
public abstract fun allowed ()[Ljava/lang/String;
|
|
||||||
public abstract fun def ()Ljava/lang/String;
|
|
||||||
public abstract fun enumeration ()Ljava/lang/Class;
|
|
||||||
public abstract fun type ()[Lhep/dataforge/values/ValueType;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/properties/ConfigProperty : hep/dataforge/properties/Property {
|
public final class space/kscience/dataforge/provider/DfTypeKt {
|
||||||
public fun <init> (Lhep/dataforge/meta/Config;Lhep/dataforge/names/Name;Lhep/dataforge/meta/transformations/MetaConverter;)V
|
|
||||||
public final fun getConfig ()Lhep/dataforge/meta/Config;
|
|
||||||
public final fun getConverter ()Lhep/dataforge/meta/transformations/MetaConverter;
|
|
||||||
public final fun getName ()Lhep/dataforge/names/Name;
|
|
||||||
public fun getValue ()Ljava/lang/Object;
|
|
||||||
public fun onChange (Ljava/lang/Object;Lkotlin/jvm/functions/Function1;)V
|
|
||||||
public fun removeChangeListener (Ljava/lang/Object;)V
|
|
||||||
public fun setValue (Ljava/lang/Object;)V
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public abstract interface class hep/dataforge/properties/Property {
|
public final class space/kscience/dataforge/provider/Path : java/lang/Iterable, kotlin/jvm/internal/markers/KMappedMarker {
|
||||||
public abstract fun getValue ()Ljava/lang/Object;
|
public static final field Companion Lspace/kscience/dataforge/provider/Path$Companion;
|
||||||
public abstract fun onChange (Ljava/lang/Object;Lkotlin/jvm/functions/Function1;)V
|
|
||||||
public abstract fun removeChangeListener (Ljava/lang/Object;)V
|
|
||||||
public abstract fun setValue (Ljava/lang/Object;)V
|
|
||||||
}
|
|
||||||
|
|
||||||
public final class hep/dataforge/properties/Property$DefaultImpls {
|
|
||||||
public static synthetic fun onChange$default (Lhep/dataforge/properties/Property;Ljava/lang/Object;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
|
|
||||||
public static synthetic fun removeChangeListener$default (Lhep/dataforge/properties/Property;Ljava/lang/Object;ILjava/lang/Object;)V
|
|
||||||
}
|
|
||||||
|
|
||||||
public final class hep/dataforge/properties/PropertyKt {
|
|
||||||
public static final fun bind (Lhep/dataforge/properties/Property;Lhep/dataforge/properties/Property;)V
|
|
||||||
public static final fun mirror (Lhep/dataforge/properties/Property;Lhep/dataforge/properties/Property;Lkotlinx/coroutines/CoroutineScope;)V
|
|
||||||
public static final fun toFlow (Lhep/dataforge/properties/Property;)Lkotlinx/coroutines/flow/StateFlow;
|
|
||||||
}
|
|
||||||
|
|
||||||
public final class hep/dataforge/provider/DfTypeKt {
|
|
||||||
public static final fun getDfType (Lkotlin/reflect/KClass;)Ljava/lang/String;
|
|
||||||
}
|
|
||||||
|
|
||||||
public final class hep/dataforge/provider/Path : java/lang/Iterable, kotlin/jvm/internal/markers/KMappedMarker {
|
|
||||||
public static final field Companion Lhep/dataforge/provider/Path$Companion;
|
|
||||||
public static final field PATH_SEGMENT_SEPARATOR Ljava/lang/String;
|
public static final field PATH_SEGMENT_SEPARATOR Ljava/lang/String;
|
||||||
public static final synthetic fun box-impl (Ljava/util/List;)Lhep/dataforge/provider/Path;
|
public static final synthetic fun box-impl (Ljava/util/List;)Lspace/kscience/dataforge/provider/Path;
|
||||||
public static fun constructor-impl (Ljava/util/List;)Ljava/util/List;
|
public static fun constructor-impl (Ljava/util/List;)Ljava/util/List;
|
||||||
public fun equals (Ljava/lang/Object;)Z
|
public fun equals (Ljava/lang/Object;)Z
|
||||||
public static fun equals-impl (Ljava/util/List;Ljava/lang/Object;)Z
|
public static fun equals-impl (Ljava/util/List;Ljava/lang/Object;)Z
|
||||||
public static final fun equals-impl0 (Ljava/util/List;Ljava/util/List;)Z
|
public static final fun equals-impl0 (Ljava/util/List;Ljava/util/List;)Z
|
||||||
public static final fun getHead-impl (Ljava/util/List;)Lhep/dataforge/provider/PathToken;
|
|
||||||
public static final fun getLength-impl (Ljava/util/List;)I
|
|
||||||
public static final fun getTail-e2ET3QM (Ljava/util/List;)Ljava/util/List;
|
|
||||||
public final fun getTokens ()Ljava/util/List;
|
public final fun getTokens ()Ljava/util/List;
|
||||||
public fun hashCode ()I
|
public fun hashCode ()I
|
||||||
public static fun hashCode-impl (Ljava/util/List;)I
|
public static fun hashCode-impl (Ljava/util/List;)I
|
||||||
@ -281,54 +276,52 @@ public final class hep/dataforge/provider/Path : java/lang/Iterable, kotlin/jvm/
|
|||||||
public final synthetic fun unbox-impl ()Ljava/util/List;
|
public final synthetic fun unbox-impl ()Ljava/util/List;
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/provider/Path$Companion {
|
public final class space/kscience/dataforge/provider/Path$Companion {
|
||||||
public final fun parse-IN54j3k (Ljava/lang/String;)Ljava/util/List;
|
public final fun parse-X5wN5Vs (Ljava/lang/String;)Ljava/util/List;
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/provider/PathKt {
|
public final class space/kscience/dataforge/provider/PathKt {
|
||||||
public static final fun plus-MQiGgVU (Ljava/util/List;Ljava/util/List;)Ljava/util/List;
|
public static final fun Path ([Lkotlin/Pair;)Ljava/util/List;
|
||||||
public static final fun toPath (Lhep/dataforge/provider/PathToken;)Ljava/util/List;
|
public static final fun Path ([Lspace/kscience/dataforge/names/Name;)Ljava/util/List;
|
||||||
|
public static final fun asPath (Lspace/kscience/dataforge/names/Name;Ljava/lang/String;)Ljava/util/List;
|
||||||
|
public static final fun asPath (Lspace/kscience/dataforge/provider/PathToken;)Ljava/util/List;
|
||||||
|
public static synthetic fun asPath$default (Lspace/kscience/dataforge/names/Name;Ljava/lang/String;ILjava/lang/Object;)Ljava/util/List;
|
||||||
|
public static final fun getHead-LGjlSZY (Ljava/util/List;)Lspace/kscience/dataforge/provider/PathToken;
|
||||||
|
public static final fun getLength-LGjlSZY (Ljava/util/List;)I
|
||||||
|
public static final fun getTail-LGjlSZY (Ljava/util/List;)Ljava/util/List;
|
||||||
|
public static final fun plus-sn2Gq0g (Ljava/util/List;Ljava/util/List;)Ljava/util/List;
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/provider/PathToken {
|
public final class space/kscience/dataforge/provider/PathToken {
|
||||||
public static final field Companion Lhep/dataforge/provider/PathToken$Companion;
|
public static final field Companion Lspace/kscience/dataforge/provider/PathToken$Companion;
|
||||||
public static final field TARGET_SEPARATOR Ljava/lang/String;
|
public static final field TARGET_SEPARATOR Ljava/lang/String;
|
||||||
public fun <init> (Lhep/dataforge/names/Name;Ljava/lang/String;)V
|
public fun <init> (Lspace/kscience/dataforge/names/Name;Ljava/lang/String;)V
|
||||||
public synthetic fun <init> (Lhep/dataforge/names/Name;Ljava/lang/String;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
|
public synthetic fun <init> (Lspace/kscience/dataforge/names/Name;Ljava/lang/String;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
|
||||||
public final fun component1 ()Lhep/dataforge/names/Name;
|
public final fun component1 ()Lspace/kscience/dataforge/names/Name;
|
||||||
public final fun component2 ()Ljava/lang/String;
|
public final fun component2 ()Ljava/lang/String;
|
||||||
public final fun copy (Lhep/dataforge/names/Name;Ljava/lang/String;)Lhep/dataforge/provider/PathToken;
|
public final fun copy (Lspace/kscience/dataforge/names/Name;Ljava/lang/String;)Lspace/kscience/dataforge/provider/PathToken;
|
||||||
public static synthetic fun copy$default (Lhep/dataforge/provider/PathToken;Lhep/dataforge/names/Name;Ljava/lang/String;ILjava/lang/Object;)Lhep/dataforge/provider/PathToken;
|
public static synthetic fun copy$default (Lspace/kscience/dataforge/provider/PathToken;Lspace/kscience/dataforge/names/Name;Ljava/lang/String;ILjava/lang/Object;)Lspace/kscience/dataforge/provider/PathToken;
|
||||||
public fun equals (Ljava/lang/Object;)Z
|
public fun equals (Ljava/lang/Object;)Z
|
||||||
public final fun getName ()Lhep/dataforge/names/Name;
|
public final fun getName ()Lspace/kscience/dataforge/names/Name;
|
||||||
public final fun getTarget ()Ljava/lang/String;
|
public final fun getTarget ()Ljava/lang/String;
|
||||||
public fun hashCode ()I
|
public fun hashCode ()I
|
||||||
public fun toString ()Ljava/lang/String;
|
public fun toString ()Ljava/lang/String;
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/provider/PathToken$Companion {
|
public final class space/kscience/dataforge/provider/PathToken$Companion {
|
||||||
public final fun parse (Ljava/lang/String;)Lhep/dataforge/provider/PathToken;
|
public final fun parse (Ljava/lang/String;Z)Lspace/kscience/dataforge/provider/PathToken;
|
||||||
|
public static synthetic fun parse$default (Lspace/kscience/dataforge/provider/PathToken$Companion;Ljava/lang/String;ZILjava/lang/Object;)Lspace/kscience/dataforge/provider/PathToken;
|
||||||
}
|
}
|
||||||
|
|
||||||
public abstract interface class hep/dataforge/provider/Provider {
|
public abstract interface class space/kscience/dataforge/provider/Provider {
|
||||||
public abstract fun content (Ljava/lang/String;)Ljava/util/Map;
|
public fun content (Ljava/lang/String;)Ljava/util/Map;
|
||||||
public abstract fun getDefaultChainTarget ()Ljava/lang/String;
|
public fun getDefaultChainTarget ()Ljava/lang/String;
|
||||||
public abstract fun getDefaultTarget ()Ljava/lang/String;
|
public fun getDefaultTarget ()Ljava/lang/String;
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/provider/Provider$DefaultImpls {
|
public final class space/kscience/dataforge/provider/ProviderKt {
|
||||||
public static fun content (Lhep/dataforge/provider/Provider;Ljava/lang/String;)Ljava/util/Map;
|
public static final fun provide-CSkoCSg (Lspace/kscience/dataforge/provider/Provider;Ljava/util/List;Ljava/lang/String;)Ljava/lang/Object;
|
||||||
public static fun getDefaultChainTarget (Lhep/dataforge/provider/Provider;)Ljava/lang/String;
|
public static synthetic fun provide-CSkoCSg$default (Lspace/kscience/dataforge/provider/Provider;Ljava/util/List;Ljava/lang/String;ILjava/lang/Object;)Ljava/lang/Object;
|
||||||
public static fun getDefaultTarget (Lhep/dataforge/provider/Provider;)Ljava/lang/String;
|
public static final fun top (Lspace/kscience/dataforge/provider/Provider;Ljava/lang/String;Lkotlin/reflect/KClass;)Ljava/util/Map;
|
||||||
}
|
|
||||||
|
|
||||||
public final class hep/dataforge/provider/ProviderKt {
|
|
||||||
public static final fun provide-0Dbucg0 (Lhep/dataforge/provider/Provider;Ljava/util/List;Ljava/lang/String;)Ljava/lang/Object;
|
|
||||||
public static synthetic fun provide-0Dbucg0$default (Lhep/dataforge/provider/Provider;Ljava/util/List;Ljava/lang/String;ILjava/lang/Object;)Ljava/lang/Object;
|
|
||||||
public static final fun top (Lhep/dataforge/provider/Provider;Ljava/lang/String;Lkotlin/reflect/KClass;)Ljava/util/Map;
|
|
||||||
}
|
|
||||||
|
|
||||||
public abstract interface annotation class hep/dataforge/provider/Type : java/lang/annotation/Annotation {
|
|
||||||
public abstract fun id ()Ljava/lang/String;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1,31 +1,24 @@
|
|||||||
plugins {
|
plugins {
|
||||||
id("ru.mipt.npm.mpp")
|
id("space.kscience.gradle.mpp")
|
||||||
id("ru.mipt.npm.native")
|
|
||||||
}
|
}
|
||||||
|
|
||||||
description = "Context and provider definitions"
|
description = "Context and provider definitions"
|
||||||
|
|
||||||
kscience {
|
kscience {
|
||||||
|
jvm()
|
||||||
|
js()
|
||||||
|
native()
|
||||||
useCoroutines()
|
useCoroutines()
|
||||||
|
useSerialization()
|
||||||
|
dependencies {
|
||||||
|
api(project(":dataforge-meta"))
|
||||||
|
}
|
||||||
|
dependencies(jvmMain){
|
||||||
|
api(kotlin("reflect"))
|
||||||
|
api("org.slf4j:slf4j-api:1.7.30")
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
kotlin {
|
readme {
|
||||||
sourceSets {
|
maturity = space.kscience.gradle.Maturity.DEVELOPMENT
|
||||||
val commonMain by getting {
|
|
||||||
dependencies {
|
|
||||||
api(project(":dataforge-meta"))
|
|
||||||
api("io.github.microutils:kotlin-logging:1.9.0-dev-npm-2")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
val jvmMain by getting {
|
|
||||||
dependencies {
|
|
||||||
api(kotlin("reflect"))
|
|
||||||
api("ch.qos.logback:logback-classic:1.2.3")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
val jsMain by getting {
|
|
||||||
dependencies {
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
@ -1,41 +0,0 @@
|
|||||||
package hep.dataforge.context
|
|
||||||
|
|
||||||
import hep.dataforge.meta.Meta
|
|
||||||
import hep.dataforge.names.Name
|
|
||||||
import kotlin.properties.ReadOnlyProperty
|
|
||||||
import kotlin.reflect.KClass
|
|
||||||
import kotlin.reflect.KProperty
|
|
||||||
|
|
||||||
public abstract class AbstractPlugin(override val meta: Meta = Meta.EMPTY) : Plugin {
|
|
||||||
private var _context: Context? = null
|
|
||||||
private val dependencies = ArrayList<PluginFactory<*>>()
|
|
||||||
|
|
||||||
override val context: Context
|
|
||||||
get() = _context ?: error("Plugin $tag is not attached")
|
|
||||||
|
|
||||||
override fun attach(context: Context) {
|
|
||||||
this._context = context
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun detach() {
|
|
||||||
this._context = null
|
|
||||||
}
|
|
||||||
|
|
||||||
final override fun dependsOn(): List<PluginFactory<*>> = dependencies
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Register plugin dependency and return a delegate which provides lazily initialized reference to dependent plugin
|
|
||||||
*/
|
|
||||||
protected fun <P : Plugin> require(factory: PluginFactory<P>): ReadOnlyProperty<AbstractPlugin, P> {
|
|
||||||
dependencies.add(factory)
|
|
||||||
return PluginDependencyDelegate(factory.type)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public fun <T : Named> Collection<T>.toMap(): Map<Name, T> = associate { it.name to it }
|
|
||||||
|
|
||||||
private class PluginDependencyDelegate<P : Plugin>(val type: KClass<out P>) : ReadOnlyProperty<AbstractPlugin, P> {
|
|
||||||
override fun getValue(thisRef: AbstractPlugin, property: KProperty<*>): P {
|
|
||||||
return thisRef.context.plugins[type] ?: error("Plugin with type $type not found")
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,44 +0,0 @@
|
|||||||
package hep.dataforge.context
|
|
||||||
|
|
||||||
import hep.dataforge.meta.*
|
|
||||||
import hep.dataforge.names.toName
|
|
||||||
|
|
||||||
/**
|
|
||||||
* A convenience builder for context
|
|
||||||
*/
|
|
||||||
@DFBuilder
|
|
||||||
public class ContextBuilder(private val parent: Context = Global, public var name: String = "@anonymous") {
|
|
||||||
private val plugins = HashSet<Plugin>()
|
|
||||||
private var meta = MetaBuilder()
|
|
||||||
|
|
||||||
public fun properties(action: MetaBuilder.() -> Unit) {
|
|
||||||
meta.action()
|
|
||||||
}
|
|
||||||
|
|
||||||
public fun plugin(plugin: Plugin) {
|
|
||||||
plugins.add(plugin)
|
|
||||||
}
|
|
||||||
|
|
||||||
@OptIn(DFExperimental::class)
|
|
||||||
private fun findPluginFactory(tag: PluginTag): PluginFactory<*> =
|
|
||||||
parent.gatherInSequence<PluginFactory<*>>(PluginFactory.TYPE).values
|
|
||||||
.find { it.tag.matches(tag) } ?: error("Can't resolve plugin factory for $tag")
|
|
||||||
|
|
||||||
public fun plugin(tag: PluginTag, metaBuilder: MetaBuilder.() -> Unit = {}) {
|
|
||||||
val factory = findPluginFactory(tag)
|
|
||||||
val plugin = factory.invoke(Meta(metaBuilder), parent)
|
|
||||||
plugins.add(plugin)
|
|
||||||
}
|
|
||||||
|
|
||||||
public fun plugin(builder: PluginFactory<*>, action: MetaBuilder.() -> Unit = {}) {
|
|
||||||
plugins.add(builder.invoke(Meta(action)))
|
|
||||||
}
|
|
||||||
|
|
||||||
public fun plugin(name: String, group: String = "", version: String = "", action: MetaBuilder.() -> Unit = {}) {
|
|
||||||
plugin(PluginTag(name, group, version), action)
|
|
||||||
}
|
|
||||||
|
|
||||||
public fun build(): Context {
|
|
||||||
return Context(name.toName(), parent, meta.seal(), plugins)
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,7 +0,0 @@
|
|||||||
package hep.dataforge.context
|
|
||||||
|
|
||||||
import hep.dataforge.meta.Meta
|
|
||||||
|
|
||||||
public interface Factory<out T : Any> {
|
|
||||||
public operator fun invoke(meta: Meta = Meta.EMPTY, context: Context = Global): T
|
|
||||||
}
|
|
@ -1,48 +0,0 @@
|
|||||||
package hep.dataforge.context
|
|
||||||
|
|
||||||
import hep.dataforge.meta.Meta
|
|
||||||
import hep.dataforge.names.asName
|
|
||||||
import kotlinx.coroutines.GlobalScope
|
|
||||||
import kotlinx.coroutines.SupervisorJob
|
|
||||||
import kotlin.coroutines.CoroutineContext
|
|
||||||
import kotlin.native.concurrent.ThreadLocal
|
|
||||||
|
|
||||||
/**
|
|
||||||
* A global root context. Closing [Global] terminates the framework.
|
|
||||||
*/
|
|
||||||
@ThreadLocal
|
|
||||||
public object Global : Context("GLOBAL".asName(), null, Meta.EMPTY) {
|
|
||||||
|
|
||||||
override val coroutineContext: CoroutineContext = GlobalScope.coroutineContext + SupervisorJob()
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Closing all contexts
|
|
||||||
*
|
|
||||||
* @throws Exception
|
|
||||||
*/
|
|
||||||
override fun close() {
|
|
||||||
logger.info { "Shutting down GLOBAL" }
|
|
||||||
for (ctx in contextRegistry.values) {
|
|
||||||
ctx.close()
|
|
||||||
}
|
|
||||||
super.close()
|
|
||||||
}
|
|
||||||
|
|
||||||
private val contextRegistry = HashMap<String, Context>()
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get previously built context
|
|
||||||
*
|
|
||||||
* @param name
|
|
||||||
* @return
|
|
||||||
*/
|
|
||||||
public fun getContext(name: String): Context? {
|
|
||||||
return contextRegistry[name]
|
|
||||||
}
|
|
||||||
|
|
||||||
public fun context(name: String, parent: Context = this, block: ContextBuilder.() -> Unit = {}): Context =
|
|
||||||
ContextBuilder(parent, name).apply(block).build().also {
|
|
||||||
contextRegistry[name] = it
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
@ -1,154 +0,0 @@
|
|||||||
package hep.dataforge.context
|
|
||||||
|
|
||||||
import hep.dataforge.meta.Meta
|
|
||||||
import hep.dataforge.meta.MetaBuilder
|
|
||||||
import hep.dataforge.provider.Type
|
|
||||||
import kotlin.reflect.KClass
|
|
||||||
|
|
||||||
|
|
||||||
@Type(PluginFactory.TYPE)
|
|
||||||
public interface PluginFactory<T : Plugin> : Factory<T> {
|
|
||||||
public val tag: PluginTag
|
|
||||||
public val type: KClass<out T>
|
|
||||||
|
|
||||||
public companion object {
|
|
||||||
public const val TYPE: String = "pluginFactory"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* The manager for plugin system. Should monitor plugin dependencies and locks.
|
|
||||||
*
|
|
||||||
* @property context A context for this plugin manager
|
|
||||||
* @author Alexander Nozik
|
|
||||||
*/
|
|
||||||
public class PluginManager(override val context: Context, plugins: Set<Plugin>) : ContextAware, Iterable<Plugin> {
|
|
||||||
|
|
||||||
//TODO refactor to read-only container
|
|
||||||
|
|
||||||
/**
|
|
||||||
* A set of loaded plugins
|
|
||||||
*/
|
|
||||||
private val plugins: HashSet<Plugin> = HashSet(plugins)
|
|
||||||
|
|
||||||
init {
|
|
||||||
plugins.forEach { it.attach(context) }
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* A [PluginManager] of parent context if it is present
|
|
||||||
*/
|
|
||||||
private val parent: PluginManager? = context.parent?.plugins
|
|
||||||
|
|
||||||
/**
|
|
||||||
* List plugins stored in this [PluginManager]. If [inherit] is true, include parent plugins as well
|
|
||||||
*/
|
|
||||||
public fun list(inherit: Boolean): Collection<Plugin> {
|
|
||||||
return if (inherit && parent != null) {
|
|
||||||
plugins + parent.list(true)
|
|
||||||
} else {
|
|
||||||
plugins
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get existing plugin or return null if not present. Only first matching plugin is returned.
|
|
||||||
* @param inherit search for parent [PluginManager] plugins
|
|
||||||
* @param predicate condition for the plugin
|
|
||||||
*/
|
|
||||||
public fun find(inherit: Boolean = true, predicate: (Plugin) -> Boolean): Plugin? =
|
|
||||||
list(inherit).find(predicate)
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Find a loaded plugin via its tag
|
|
||||||
*
|
|
||||||
* @param tag
|
|
||||||
* @return
|
|
||||||
*/
|
|
||||||
public operator fun get(tag: PluginTag, inherit: Boolean = true): Plugin? =
|
|
||||||
find(inherit) { tag.matches(it.tag) }
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Find a loaded plugin via its class. This method does not check if the result is unique and just returns first
|
|
||||||
* plugin matching the class condition.
|
|
||||||
* For safe search provide a tag since tags are checked on load and plugins with the same tag are not allowed
|
|
||||||
* in the same context.
|
|
||||||
*
|
|
||||||
* @param tag
|
|
||||||
* @param type
|
|
||||||
* @param <T>
|
|
||||||
* @return
|
|
||||||
*/
|
|
||||||
@Suppress("UNCHECKED_CAST")
|
|
||||||
public operator fun <T : Any> get(type: KClass<out T>, tag: PluginTag? = null, recursive: Boolean = true): T? =
|
|
||||||
find(recursive) { type.isInstance(it) && (tag == null || tag.matches(it.tag)) } as T?
|
|
||||||
|
|
||||||
public inline operator fun <reified T : Any> get(tag: PluginTag? = null, recursive: Boolean = true): T? =
|
|
||||||
get(T::class, tag, recursive)
|
|
||||||
|
|
||||||
public inline operator fun <reified T : Plugin> get(factory: PluginFactory<T>, recursive: Boolean = true): T? =
|
|
||||||
get(factory.type, factory.tag, recursive)
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Load given plugin into this manager and return loaded instance.
|
|
||||||
* Throw error if plugin of the same type and tag already exists in manager.
|
|
||||||
*
|
|
||||||
* @param plugin
|
|
||||||
* @return
|
|
||||||
*/
|
|
||||||
public fun <T : Plugin> load(plugin: T): T {
|
|
||||||
if (get(plugin::class, plugin.tag, recursive = false) != null) {
|
|
||||||
error("Plugin with tag ${plugin.tag} already exists in ${context.name}")
|
|
||||||
} else {
|
|
||||||
for (tag in plugin.dependsOn()) {
|
|
||||||
fetch(tag, true)
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info { "Loading plugin ${plugin.name} into ${context.name}" }
|
|
||||||
plugin.attach(context)
|
|
||||||
plugins.add(plugin)
|
|
||||||
return plugin
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Load a plugin using its factory
|
|
||||||
*/
|
|
||||||
public fun <T : Plugin> load(factory: PluginFactory<T>, meta: Meta = Meta.EMPTY): T =
|
|
||||||
load(factory(meta, context))
|
|
||||||
|
|
||||||
public fun <T : Plugin> load(factory: PluginFactory<T>, metaBuilder: MetaBuilder.() -> Unit): T =
|
|
||||||
load(factory, Meta(metaBuilder))
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Remove a plugin from [PluginManager]
|
|
||||||
*/
|
|
||||||
public fun remove(plugin: Plugin) {
|
|
||||||
if (plugins.contains(plugin)) {
|
|
||||||
logger.info { "Removing plugin ${plugin.name} from ${context.name}" }
|
|
||||||
plugin.detach()
|
|
||||||
plugins.remove(plugin)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get an existing plugin with given meta or load new one using provided factory
|
|
||||||
*/
|
|
||||||
public fun <T : Plugin> fetch(factory: PluginFactory<T>, recursive: Boolean = true, meta: Meta = Meta.EMPTY): T {
|
|
||||||
val loaded = get(factory.type, factory.tag, recursive)
|
|
||||||
return when {
|
|
||||||
loaded == null -> load(factory(meta, context))
|
|
||||||
loaded.meta == meta -> loaded // if meta is the same, return existing plugin
|
|
||||||
else -> throw RuntimeException("Can't load plugin with tag ${factory.tag}. Plugin with this tag and different configuration already exists in context.")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public fun <T : Plugin> fetch(
|
|
||||||
factory: PluginFactory<T>,
|
|
||||||
recursive: Boolean = true,
|
|
||||||
metaBuilder: MetaBuilder.() -> Unit,
|
|
||||||
): T = fetch(factory, recursive, Meta(metaBuilder))
|
|
||||||
|
|
||||||
override fun iterator(): Iterator<Plugin> = plugins.iterator()
|
|
||||||
|
|
||||||
}
|
|
@ -1,33 +0,0 @@
|
|||||||
package hep.dataforge.properties
|
|
||||||
|
|
||||||
import hep.dataforge.meta.Config
|
|
||||||
import hep.dataforge.meta.DFExperimental
|
|
||||||
import hep.dataforge.meta.get
|
|
||||||
import hep.dataforge.meta.transformations.MetaConverter
|
|
||||||
import hep.dataforge.meta.transformations.nullableItemToObject
|
|
||||||
import hep.dataforge.meta.transformations.nullableObjectToMetaItem
|
|
||||||
import hep.dataforge.names.Name
|
|
||||||
|
|
||||||
@DFExperimental
|
|
||||||
public class ConfigProperty<T : Any>(
|
|
||||||
public val config: Config,
|
|
||||||
public val name: Name,
|
|
||||||
public val converter: MetaConverter<T>,
|
|
||||||
) : Property<T?> {
|
|
||||||
|
|
||||||
override var value: T?
|
|
||||||
get() = converter.nullableItemToObject(config[name])
|
|
||||||
set(value) {
|
|
||||||
config.setItem(name,converter.nullableObjectToMetaItem(value))
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun onChange(owner: Any?, callback: (T?) -> Unit) {
|
|
||||||
config.onChange(owner) { name, oldItem, newItem ->
|
|
||||||
if (name == this.name && oldItem != newItem) callback(converter.nullableItemToObject(newItem))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun removeChangeListener(owner: Any?) {
|
|
||||||
config.removeListener(owner)
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,75 +0,0 @@
|
|||||||
/*
|
|
||||||
* Copyright 2015 Alexander Nozik.
|
|
||||||
*
|
|
||||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
* you may not use this file except in compliance with the License.
|
|
||||||
* You may obtain a copy of the License at
|
|
||||||
*
|
|
||||||
* http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
*
|
|
||||||
* Unless required by applicable law or agreed to in writing, software
|
|
||||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
* See the License for the specific language governing permissions and
|
|
||||||
* limitations under the License.
|
|
||||||
*/
|
|
||||||
package hep.dataforge.provider
|
|
||||||
|
|
||||||
import hep.dataforge.names.Name
|
|
||||||
import hep.dataforge.names.toName
|
|
||||||
|
|
||||||
/**
|
|
||||||
*
|
|
||||||
*
|
|
||||||
* Path interface.
|
|
||||||
*
|
|
||||||
* @author Alexander Nozik
|
|
||||||
* @version $Id: $Id
|
|
||||||
*/
|
|
||||||
public inline class Path(public val tokens: List<PathToken>) : Iterable<PathToken> {
|
|
||||||
|
|
||||||
public val head: PathToken? get() = tokens.firstOrNull()
|
|
||||||
|
|
||||||
public val length: Int get() = tokens.size
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns non-empty optional containing the chain without first segment in case of chain path.
|
|
||||||
*
|
|
||||||
* @return
|
|
||||||
*/
|
|
||||||
public val tail: Path? get() = if (tokens.isEmpty()) null else Path(tokens.drop(1))
|
|
||||||
|
|
||||||
override fun iterator(): Iterator<PathToken> = tokens.iterator()
|
|
||||||
|
|
||||||
public companion object {
|
|
||||||
public const val PATH_SEGMENT_SEPARATOR: String = "/"
|
|
||||||
|
|
||||||
public fun parse(path: String): Path {
|
|
||||||
val head = path.substringBefore(PATH_SEGMENT_SEPARATOR)
|
|
||||||
val tail = path.substringAfter(PATH_SEGMENT_SEPARATOR)
|
|
||||||
return PathToken.parse(head).toPath() + parse(tail)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public operator fun Path.plus(path: Path): Path = Path(this.tokens + path.tokens)
|
|
||||||
|
|
||||||
public data class PathToken(val name: Name, val target: String? = null) {
|
|
||||||
override fun toString(): String = if (target == null) {
|
|
||||||
name.toString()
|
|
||||||
} else {
|
|
||||||
"$target$TARGET_SEPARATOR$name"
|
|
||||||
}
|
|
||||||
|
|
||||||
public companion object {
|
|
||||||
public const val TARGET_SEPARATOR: String = "::"
|
|
||||||
public fun parse(token: String): PathToken {
|
|
||||||
val target = token.substringBefore(TARGET_SEPARATOR, "")
|
|
||||||
val name = token.substringAfter(TARGET_SEPARATOR).toName()
|
|
||||||
if (target.contains("[")) TODO("target separators in queries are not supported")
|
|
||||||
return PathToken(name, target)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public fun PathToken.toPath(): Path = Path(listOf(this))
|
|
@ -1,10 +0,0 @@
|
|||||||
package hep.dataforge.provider
|
|
||||||
|
|
||||||
/**
|
|
||||||
* A text label for internal DataForge type classification. Alternative for mime container type.
|
|
||||||
*
|
|
||||||
* The DataForge type notation presumes that type `A.B.C` is the subtype of `A.B`
|
|
||||||
*/
|
|
||||||
@MustBeDocumented
|
|
||||||
@Target(AnnotationTarget.CLASS)
|
|
||||||
public annotation class Type(val id: String)
|
|
@ -0,0 +1,57 @@
|
|||||||
|
package space.kscience.dataforge.context
|
||||||
|
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.misc.DFInternal
|
||||||
|
import space.kscience.dataforge.misc.Named
|
||||||
|
import space.kscience.dataforge.names.Name
|
||||||
|
import kotlin.properties.ReadOnlyProperty
|
||||||
|
import kotlin.reflect.KClass
|
||||||
|
import kotlin.reflect.KProperty
|
||||||
|
|
||||||
|
public abstract class AbstractPlugin(override val meta: Meta = Meta.EMPTY) : Plugin {
|
||||||
|
private var _context: Context? = null
|
||||||
|
private val dependencies = HashMap<PluginFactory<*>, Meta>()
|
||||||
|
|
||||||
|
override val isAttached: Boolean get() = _context != null
|
||||||
|
|
||||||
|
override val context: Context
|
||||||
|
get() = _context ?: error("Plugin $tag is not attached")
|
||||||
|
|
||||||
|
override fun attach(context: Context) {
|
||||||
|
this._context = context
|
||||||
|
}
|
||||||
|
|
||||||
|
override fun detach() {
|
||||||
|
this._context = null
|
||||||
|
}
|
||||||
|
|
||||||
|
override fun dependsOn(): Map<PluginFactory<*>, Meta> = dependencies
|
||||||
|
|
||||||
|
protected fun <P : Plugin> require(
|
||||||
|
factory: PluginFactory<P>,
|
||||||
|
type: KClass<P>,
|
||||||
|
meta: Meta = Meta.EMPTY,
|
||||||
|
): ReadOnlyProperty<AbstractPlugin, P> {
|
||||||
|
dependencies[factory] = meta
|
||||||
|
return PluginDependencyDelegate(factory, type)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Register plugin dependency and return a delegate which provides lazily initialized reference to dependent plugin
|
||||||
|
*/
|
||||||
|
protected inline fun <reified P : Plugin> require(
|
||||||
|
factory: PluginFactory<P>,
|
||||||
|
meta: Meta = Meta.EMPTY,
|
||||||
|
): ReadOnlyProperty<AbstractPlugin, P> = require(factory, P::class, meta)
|
||||||
|
}
|
||||||
|
|
||||||
|
public fun <T : Named> Collection<T>.associateByName(): Map<Name, T> = associate { it.name to it }
|
||||||
|
|
||||||
|
private class PluginDependencyDelegate<P : Plugin>(val factory: PluginFactory<P>, val type: KClass<P>) :
|
||||||
|
ReadOnlyProperty<AbstractPlugin, P> {
|
||||||
|
@OptIn(DFInternal::class)
|
||||||
|
override fun getValue(thisRef: AbstractPlugin, property: KProperty<*>): P {
|
||||||
|
if (!thisRef.isAttached) error("Plugin dependency must not be called eagerly during initialization.")
|
||||||
|
return thisRef.context.plugins.getByType(type, factory.tag) ?: error("Plugin ${factory.tag} not found")
|
||||||
|
}
|
||||||
|
}
|
@ -1,17 +1,14 @@
|
|||||||
package hep.dataforge.context
|
package space.kscience.dataforge.context
|
||||||
|
|
||||||
import hep.dataforge.meta.Laminate
|
|
||||||
import hep.dataforge.meta.Meta
|
|
||||||
import hep.dataforge.meta.MetaRepr
|
|
||||||
import hep.dataforge.meta.sequence
|
|
||||||
import hep.dataforge.names.Name
|
|
||||||
import hep.dataforge.names.plus
|
|
||||||
import hep.dataforge.provider.Provider
|
|
||||||
import kotlinx.coroutines.CoroutineScope
|
import kotlinx.coroutines.CoroutineScope
|
||||||
import kotlinx.coroutines.Job
|
import kotlinx.coroutines.Job
|
||||||
import kotlinx.coroutines.SupervisorJob
|
import kotlinx.coroutines.SupervisorJob
|
||||||
import mu.KLogger
|
import space.kscience.dataforge.meta.*
|
||||||
import mu.KotlinLogging
|
import space.kscience.dataforge.misc.DFExperimental
|
||||||
|
import space.kscience.dataforge.misc.Named
|
||||||
|
import space.kscience.dataforge.misc.ThreadSafe
|
||||||
|
import space.kscience.dataforge.names.Name
|
||||||
|
import space.kscience.dataforge.provider.Provider
|
||||||
import kotlin.coroutines.CoroutineContext
|
import kotlin.coroutines.CoroutineContext
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -24,44 +21,40 @@ import kotlin.coroutines.CoroutineContext
|
|||||||
* be overridden by plugin implementation.
|
* be overridden by plugin implementation.
|
||||||
*
|
*
|
||||||
*/
|
*/
|
||||||
public open class Context(
|
public open class Context internal constructor(
|
||||||
final override val name: Name,
|
final override val name: Name,
|
||||||
public val parent: Context?,
|
public val parent: Context?,
|
||||||
|
plugins: Set<Plugin>, // set of unattached plugins
|
||||||
meta: Meta,
|
meta: Meta,
|
||||||
plugins: Set<Plugin> = emptySet(),
|
|
||||||
) : Named, MetaRepr, Provider, CoroutineScope {
|
) : Named, MetaRepr, Provider, CoroutineScope {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Context properties. Working as substitute for environment variables
|
* Context properties. Working as substitute for environment variables
|
||||||
*/
|
*/
|
||||||
private val properties: Laminate = if (parent == null) {
|
public val properties: Laminate = if (parent == null) {
|
||||||
Laminate(meta)
|
Laminate(meta)
|
||||||
} else {
|
} else {
|
||||||
Laminate(meta, parent.properties)
|
Laminate(meta, parent.properties)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Context logger
|
|
||||||
*/
|
|
||||||
public val logger: KLogger = KotlinLogging.logger(name.toString())
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* A [PluginManager] for current context
|
* A [PluginManager] for current context
|
||||||
*/
|
*/
|
||||||
public val plugins: PluginManager by lazy { PluginManager(this, plugins)}
|
public val plugins: PluginManager by lazy { PluginManager(this, plugins) }
|
||||||
|
|
||||||
override val defaultTarget: String get() = Plugin.TARGET
|
override val defaultTarget: String get() = Plugin.TARGET
|
||||||
|
|
||||||
public fun content(target: String, inherit: Boolean): Map<Name, Any> {
|
public fun content(target: String, inherit: Boolean): Map<Name, Any> {
|
||||||
return if (inherit) {
|
return if (inherit) {
|
||||||
when (target) {
|
when (target) {
|
||||||
PROPERTY_TARGET -> properties.sequence().toMap()
|
PROPERTY_TARGET -> properties.nodeSequence().toMap()
|
||||||
Plugin.TARGET -> plugins.list(true).associateBy { it.name }
|
Plugin.TARGET -> plugins.list(true).associateBy { it.name }
|
||||||
else -> emptyMap()
|
else -> emptyMap()
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
when (target) {
|
when (target) {
|
||||||
PROPERTY_TARGET -> properties.layers.firstOrNull()?.sequence()?.toMap() ?: emptyMap()
|
PROPERTY_TARGET -> properties.layers.firstOrNull()?.nodeSequence()?.toMap() ?: emptyMap()
|
||||||
Plugin.TARGET -> plugins.list(false).associateBy { it.name }
|
Plugin.TARGET -> plugins.list(false).associateBy { it.name }
|
||||||
else -> emptyMap()
|
else -> emptyMap()
|
||||||
}
|
}
|
||||||
@ -76,18 +69,35 @@ public open class Context(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private val childrenContexts = HashMap<Name, Context>()
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Detach all plugins and terminate context
|
* Get and validate existing context or build and register a new child context.
|
||||||
|
* @param name the relative (tail) name of the new context. If null, uses context hash code as a marker.
|
||||||
|
*/
|
||||||
|
@OptIn(DFExperimental::class)
|
||||||
|
@ThreadSafe
|
||||||
|
public fun buildContext(name: Name? = null, block: ContextBuilder.() -> Unit = {}): Context {
|
||||||
|
val existing = name?.let { childrenContexts[name] }
|
||||||
|
return existing?.modify(block) ?: ContextBuilder(this, name).apply(block).build().also {
|
||||||
|
childrenContexts[it.name] = it
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Detach all plugins, and close child contexts
|
||||||
*/
|
*/
|
||||||
public open fun close() {
|
public open fun close() {
|
||||||
|
//recursively closed child context
|
||||||
|
childrenContexts.forEach { it.value.close() }
|
||||||
//detach all plugins
|
//detach all plugins
|
||||||
plugins.forEach { it.detach() }
|
plugins.forEach { it.detach() }
|
||||||
}
|
}
|
||||||
|
|
||||||
override fun toMeta(): Meta = Meta {
|
override fun toMeta(): Meta = Meta {
|
||||||
"parent" to parent?.name
|
"parent" to parent?.name
|
||||||
"properties" put properties.layers.firstOrNull()
|
properties.layers.firstOrNull()?.let { set("properties", it) }
|
||||||
"plugins" put plugins.map { it.toMeta() }
|
"plugins" putIndexed plugins.map { it.toMeta() }
|
||||||
}
|
}
|
||||||
|
|
||||||
public companion object {
|
public companion object {
|
||||||
@ -106,12 +116,4 @@ public interface ContextAware {
|
|||||||
* @return
|
* @return
|
||||||
*/
|
*/
|
||||||
public val context: Context
|
public val context: Context
|
||||||
|
|
||||||
public val logger: KLogger
|
|
||||||
get() = if (this is Named) {
|
|
||||||
KotlinLogging.logger((context.name + this.name).toString())
|
|
||||||
} else {
|
|
||||||
context.logger
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
}
|
@ -0,0 +1,111 @@
|
|||||||
|
package space.kscience.dataforge.context
|
||||||
|
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.meta.MutableMeta
|
||||||
|
import space.kscience.dataforge.meta.seal
|
||||||
|
import space.kscience.dataforge.meta.toMutableMeta
|
||||||
|
import space.kscience.dataforge.misc.DFBuilder
|
||||||
|
import space.kscience.dataforge.misc.DFExperimental
|
||||||
|
import space.kscience.dataforge.names.Name
|
||||||
|
import space.kscience.dataforge.names.NameToken
|
||||||
|
import space.kscience.dataforge.names.asName
|
||||||
|
import space.kscience.dataforge.names.plus
|
||||||
|
import kotlin.collections.component1
|
||||||
|
import kotlin.collections.component2
|
||||||
|
import kotlin.collections.set
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A convenience builder for context
|
||||||
|
*/
|
||||||
|
@DFBuilder
|
||||||
|
public class ContextBuilder internal constructor(
|
||||||
|
private val parent: Context,
|
||||||
|
public val name: Name? = null,
|
||||||
|
meta: Meta = Meta.EMPTY,
|
||||||
|
) {
|
||||||
|
internal val factories = HashMap<PluginFactory<*>, Meta>()
|
||||||
|
internal var meta = meta.toMutableMeta()
|
||||||
|
|
||||||
|
public fun properties(action: MutableMeta.() -> Unit) {
|
||||||
|
meta.action()
|
||||||
|
}
|
||||||
|
|
||||||
|
@OptIn(DFExperimental::class)
|
||||||
|
private fun findPluginFactory(tag: PluginTag): PluginFactory<*> =
|
||||||
|
parent.gatherInSequence<PluginFactory<*>>(PluginFactory.TYPE).values
|
||||||
|
.find { it.tag.matches(tag) } ?: error("Can't resolve plugin factory for $tag")
|
||||||
|
|
||||||
|
public fun plugin(tag: PluginTag, mutableMeta: MutableMeta.() -> Unit = {}) {
|
||||||
|
val factory = findPluginFactory(tag)
|
||||||
|
factories[factory] = Meta(mutableMeta)
|
||||||
|
}
|
||||||
|
|
||||||
|
public fun plugin(factory: PluginFactory<*>, meta: Meta) {
|
||||||
|
factories[factory] = meta
|
||||||
|
}
|
||||||
|
|
||||||
|
public fun plugin(factory: PluginFactory<*>, mutableMeta: MutableMeta.() -> Unit = {}) {
|
||||||
|
factories[factory] = Meta(mutableMeta)
|
||||||
|
}
|
||||||
|
|
||||||
|
public fun plugin(name: String, group: String = "", version: String = "", action: MutableMeta.() -> Unit = {}) {
|
||||||
|
plugin(PluginTag(name, group, version), action)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add de-facto existing plugin as a dependency
|
||||||
|
*/
|
||||||
|
public fun plugin(plugin: Plugin) {
|
||||||
|
plugin(DeFactoPluginFactory(plugin))
|
||||||
|
}
|
||||||
|
|
||||||
|
public fun build(): Context {
|
||||||
|
val contextName = name ?: NameToken("@auto",hashCode().toUInt().toString(16)).asName()
|
||||||
|
val plugins = HashMap<PluginTag, Plugin>()
|
||||||
|
|
||||||
|
fun addPlugin(factory: PluginFactory<*>, meta: Meta) {
|
||||||
|
val existing = plugins[factory.tag]
|
||||||
|
// Add if does not exist
|
||||||
|
if (existing == null) {
|
||||||
|
//TODO bypass if parent already has plugin with given meta?
|
||||||
|
val plugin = factory.build(parent, meta)
|
||||||
|
|
||||||
|
for ((depFactory, deoMeta) in plugin.dependsOn()) {
|
||||||
|
addPlugin(depFactory, deoMeta)
|
||||||
|
}
|
||||||
|
|
||||||
|
parent.logger.info { "Loading plugin ${plugin.name} into $contextName" }
|
||||||
|
plugins[plugin.tag] = plugin
|
||||||
|
} else if (existing.meta != meta) {
|
||||||
|
error("Plugin with tag ${factory.tag} and meta $meta already exists in $contextName")
|
||||||
|
}
|
||||||
|
//bypass if exists with the same meta
|
||||||
|
}
|
||||||
|
|
||||||
|
factories.forEach { (factory, meta) ->
|
||||||
|
addPlugin(factory, meta)
|
||||||
|
}
|
||||||
|
|
||||||
|
return Context(contextName, parent, plugins.values.toSet(), meta.seal())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if current context contains all plugins required by the builder and return it does or forks to a new context
|
||||||
|
* if it does not.
|
||||||
|
*/
|
||||||
|
@DFExperimental
|
||||||
|
public fun Context.modify(block: ContextBuilder.() -> Unit): Context {
|
||||||
|
|
||||||
|
fun Context.contains(factory: PluginFactory<*>, meta: Meta): Boolean {
|
||||||
|
val loaded = plugins[factory.tag] ?: return false
|
||||||
|
return loaded.meta == meta
|
||||||
|
}
|
||||||
|
|
||||||
|
val builder = ContextBuilder(this, name + "mod", properties).apply(block)
|
||||||
|
val requiresFork = builder.factories.any { (factory, meta) ->
|
||||||
|
!contains(factory, meta)
|
||||||
|
} || ((properties as Meta) == builder.meta)
|
||||||
|
|
||||||
|
return if (requiresFork) builder.build() else this
|
||||||
|
}
|
@ -0,0 +1,12 @@
|
|||||||
|
package space.kscience.dataforge.context
|
||||||
|
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
|
||||||
|
public fun interface Factory<out T> {
|
||||||
|
public fun build(context: Context, meta: Meta): T
|
||||||
|
}
|
||||||
|
|
||||||
|
public operator fun <T> Factory<T>.invoke(
|
||||||
|
meta: Meta = Meta.EMPTY,
|
||||||
|
context: Context = Global,
|
||||||
|
): T = build(context, meta)
|
@ -0,0 +1,24 @@
|
|||||||
|
package space.kscience.dataforge.context
|
||||||
|
|
||||||
|
import kotlinx.coroutines.CoroutineName
|
||||||
|
import kotlinx.coroutines.Job
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.names.asName
|
||||||
|
import space.kscience.dataforge.names.parseAsName
|
||||||
|
import kotlin.coroutines.CoroutineContext
|
||||||
|
import kotlin.native.concurrent.ThreadLocal
|
||||||
|
|
||||||
|
internal expect fun getGlobalLoggerFactory(): PluginFactory<out LogManager>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A global root context. Closing [Global] terminates the framework.
|
||||||
|
*/
|
||||||
|
@ThreadLocal
|
||||||
|
private object GlobalContext : Context("GLOBAL".asName(), null, emptySet(), Meta.EMPTY) {
|
||||||
|
override val coroutineContext: CoroutineContext = Job() + CoroutineName("GlobalContext")
|
||||||
|
}
|
||||||
|
|
||||||
|
public val Global: Context get() = GlobalContext
|
||||||
|
|
||||||
|
public fun Context(name: String? = null, block: ContextBuilder.() -> Unit = {}): Context =
|
||||||
|
Global.buildContext(name?.parseAsName(), block)
|
@ -0,0 +1,89 @@
|
|||||||
|
package space.kscience.dataforge.context
|
||||||
|
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.misc.Named
|
||||||
|
import space.kscience.dataforge.names.Name
|
||||||
|
import space.kscience.dataforge.names.plus
|
||||||
|
|
||||||
|
public fun interface Logger {
|
||||||
|
public fun log(tag: String, body: () -> String)
|
||||||
|
}
|
||||||
|
|
||||||
|
public interface LogManager : Plugin, Logger {
|
||||||
|
public fun logger(name: Name): Logger
|
||||||
|
|
||||||
|
public val defaultLogger: Logger
|
||||||
|
|
||||||
|
override fun log(tag: String, body: () -> String): Unit = defaultLogger.log(tag, body)
|
||||||
|
|
||||||
|
public fun log(name: Name, tag: String, body: () -> String): Unit = logger(name).log(tag, body)
|
||||||
|
|
||||||
|
public companion object {
|
||||||
|
public const val TRACE: String = "TRACE"
|
||||||
|
public const val INFO: String = "INFO"
|
||||||
|
public const val DEBUG: String = "DEBUG"
|
||||||
|
public const val WARNING: String = "WARNING"
|
||||||
|
public const val ERROR: String = "ERROR"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public fun Logger.trace(body: () -> String): Unit = log(LogManager.TRACE, body)
|
||||||
|
public fun Logger.info(body: () -> String): Unit = log(LogManager.INFO, body)
|
||||||
|
public fun Logger.debug(body: () -> String): Unit = log(LogManager.DEBUG, body)
|
||||||
|
public fun Logger.warn(body: () -> String): Unit = log(LogManager.WARNING, body)
|
||||||
|
public fun Logger.error(body: () -> String): Unit = log(LogManager.ERROR, body)
|
||||||
|
|
||||||
|
internal val (() -> String).safe: String
|
||||||
|
get() = try {
|
||||||
|
invoke()
|
||||||
|
} catch (t: Throwable) {
|
||||||
|
"Error while evaluating log string: ${t.message}"
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
public fun Logger.error(throwable: Throwable?, body: () -> String): Unit = log(LogManager.ERROR) {
|
||||||
|
buildString {
|
||||||
|
appendLine(body())
|
||||||
|
throwable?.let { appendLine(throwable.stackTraceToString()) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
public class DefaultLogManager : AbstractPlugin(), LogManager {
|
||||||
|
|
||||||
|
override fun logger(name: Name): Logger = Logger { tag, body ->
|
||||||
|
val message: String = body.safe
|
||||||
|
println("$tag $name: [${context.name}] $message")
|
||||||
|
}
|
||||||
|
|
||||||
|
override val defaultLogger: Logger = logger(Name.EMPTY)
|
||||||
|
|
||||||
|
|
||||||
|
override val tag: PluginTag get() = Companion.tag
|
||||||
|
|
||||||
|
public companion object : PluginFactory<DefaultLogManager> {
|
||||||
|
override fun build(context: Context, meta: Meta): DefaultLogManager = DefaultLogManager()
|
||||||
|
|
||||||
|
override val tag: PluginTag = PluginTag(group = PluginTag.DATAFORGE_GROUP, name = "log.default")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Context log manager inherited from parent
|
||||||
|
*/
|
||||||
|
public val Context.logger: LogManager
|
||||||
|
get() = plugins.find(inherit = true) { it is LogManager } as? LogManager
|
||||||
|
?: getGlobalLoggerFactory().build(context = Global, meta = Meta.EMPTY).apply { attach(Global) }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The named proxy logger for a context member
|
||||||
|
*/
|
||||||
|
public val ContextAware.logger: Logger
|
||||||
|
get() = if (this is Named) {
|
||||||
|
Logger { tag, body ->
|
||||||
|
context.logger.log(this@logger.name + name, tag, body)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
context.logger
|
||||||
|
}
|
||||||
|
|
@ -1,12 +1,13 @@
|
|||||||
package hep.dataforge.context
|
package space.kscience.dataforge.context
|
||||||
|
|
||||||
import hep.dataforge.context.Plugin.Companion.TARGET
|
import space.kscience.dataforge.context.Plugin.Companion.TARGET
|
||||||
import hep.dataforge.meta.Meta
|
import space.kscience.dataforge.meta.Meta
|
||||||
import hep.dataforge.meta.MetaRepr
|
import space.kscience.dataforge.meta.MetaRepr
|
||||||
import hep.dataforge.names.Name
|
import space.kscience.dataforge.misc.DfId
|
||||||
import hep.dataforge.names.toName
|
import space.kscience.dataforge.misc.Named
|
||||||
import hep.dataforge.provider.Provider
|
import space.kscience.dataforge.names.Name
|
||||||
import hep.dataforge.provider.Type
|
import space.kscience.dataforge.names.parseAsName
|
||||||
|
import space.kscience.dataforge.provider.Provider
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The interface to define a Context plugin. A plugin stores all runtime features of a context.
|
* The interface to define a Context plugin. A plugin stores all runtime features of a context.
|
||||||
@ -17,7 +18,7 @@ import hep.dataforge.provider.Type
|
|||||||
*
|
*
|
||||||
* create - configure - attach - detach - destroy
|
* create - configure - attach - detach - destroy
|
||||||
*/
|
*/
|
||||||
@Type(TARGET)
|
@DfId(TARGET)
|
||||||
public interface Plugin : Named, ContextAware, Provider, MetaRepr {
|
public interface Plugin : Named, ContextAware, Provider, MetaRepr {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -30,14 +31,14 @@ public interface Plugin : Named, ContextAware, Provider, MetaRepr {
|
|||||||
/**
|
/**
|
||||||
* The name of this plugin ignoring version and group
|
* The name of this plugin ignoring version and group
|
||||||
*/
|
*/
|
||||||
override val name: Name get() = tag.name.toName()
|
override val name: Name get() = tag.name.parseAsName()
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Plugin dependencies which are required to attach this plugin. Plugin
|
* Plugin dependencies which are required to attach this plugin. Plugin
|
||||||
* dependencies must be initialized and enabled in the Context before this
|
* dependencies must be initialized and enabled in the Context before this
|
||||||
* plugin is enabled.
|
* plugin is enabled.
|
||||||
*/
|
*/
|
||||||
public fun dependsOn(): Collection<PluginFactory<*>>
|
public fun dependsOn(): Map<PluginFactory<*>, Meta>
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Start this plugin and attach registration info to the context. This method
|
* Start this plugin and attach registration info to the context. This method
|
||||||
@ -52,6 +53,8 @@ public interface Plugin : Named, ContextAware, Provider, MetaRepr {
|
|||||||
*/
|
*/
|
||||||
public fun detach()
|
public fun detach()
|
||||||
|
|
||||||
|
public val isAttached: Boolean
|
||||||
|
|
||||||
override fun toMeta(): Meta = Meta {
|
override fun toMeta(): Meta = Meta {
|
||||||
"context" put context.name.toString()
|
"context" put context.name.toString()
|
||||||
"type" to this::class.simpleName
|
"type" to this::class.simpleName
|
||||||
@ -62,5 +65,4 @@ public interface Plugin : Named, ContextAware, Provider, MetaRepr {
|
|||||||
public companion object {
|
public companion object {
|
||||||
public const val TARGET: String = "plugin"
|
public const val TARGET: String = "plugin"
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
@ -0,0 +1,57 @@
|
|||||||
|
package space.kscience.dataforge.context
|
||||||
|
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.misc.Named
|
||||||
|
import space.kscience.dataforge.names.Name
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A convenience factory to build simple plugins
|
||||||
|
*/
|
||||||
|
public class PluginBuilder(
|
||||||
|
name: String,
|
||||||
|
group: String = "",
|
||||||
|
version: String = "",
|
||||||
|
) {
|
||||||
|
public val tag: PluginTag = PluginTag(name, group, version)
|
||||||
|
|
||||||
|
private val content = HashMap<String, MutableMap<Name, Any>>()
|
||||||
|
private val dependencies = HashMap<PluginFactory<*>, Meta>()
|
||||||
|
|
||||||
|
public fun requires(
|
||||||
|
factory: PluginFactory<*>,
|
||||||
|
meta: Meta = Meta.EMPTY,
|
||||||
|
) {
|
||||||
|
dependencies[factory] = meta
|
||||||
|
}
|
||||||
|
|
||||||
|
public fun provides(target: String, items: Map<Name, Any>) {
|
||||||
|
content.getOrPut(target) { HashMap() }.putAll(items)
|
||||||
|
}
|
||||||
|
|
||||||
|
public fun provides(target: String, vararg items: Named) {
|
||||||
|
provides(target, items.associateBy { it.name })
|
||||||
|
}
|
||||||
|
|
||||||
|
public fun build(): PluginFactory<*> {
|
||||||
|
|
||||||
|
return object : PluginFactory<Plugin> {
|
||||||
|
override val tag: PluginTag get() = this@PluginBuilder.tag
|
||||||
|
|
||||||
|
override fun build(context: Context, meta: Meta): Plugin = object : AbstractPlugin() {
|
||||||
|
override val tag: PluginTag get() = this@PluginBuilder.tag
|
||||||
|
|
||||||
|
override fun content(target: String): Map<Name, Any> = this@PluginBuilder.content[target] ?: emptyMap()
|
||||||
|
|
||||||
|
override fun dependsOn(): Map<PluginFactory<*>, Meta> = this@PluginBuilder.dependencies
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public fun PluginFactory(
|
||||||
|
name: String,
|
||||||
|
group: String = "",
|
||||||
|
version: String = "",
|
||||||
|
block: PluginBuilder.() -> Unit,
|
||||||
|
): PluginFactory<*> = PluginBuilder(name, group, version).apply(block).build()
|
@ -0,0 +1,21 @@
|
|||||||
|
package space.kscience.dataforge.context
|
||||||
|
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.misc.DfId
|
||||||
|
|
||||||
|
@DfId(PluginFactory.TYPE)
|
||||||
|
public interface PluginFactory<T : Plugin> : Factory<T> {
|
||||||
|
public val tag: PluginTag
|
||||||
|
|
||||||
|
public companion object {
|
||||||
|
public const val TYPE: String = "pluginFactory"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Plugin factory created for the specific actual plugin
|
||||||
|
*/
|
||||||
|
internal class DeFactoPluginFactory<T : Plugin>(val plugin: T) : PluginFactory<T> {
|
||||||
|
override fun build(context: Context, meta: Meta): T = plugin
|
||||||
|
override val tag: PluginTag get() = plugin.tag
|
||||||
|
}
|
@ -0,0 +1,100 @@
|
|||||||
|
package space.kscience.dataforge.context
|
||||||
|
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.misc.DFInternal
|
||||||
|
import space.kscience.dataforge.names.plus
|
||||||
|
import kotlin.reflect.KClass
|
||||||
|
import kotlin.reflect.cast
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The manager for plugin system. Should monitor plugin dependencies and locks.
|
||||||
|
*
|
||||||
|
* @property context A context for this plugin manager
|
||||||
|
* @author Alexander Nozik
|
||||||
|
*/
|
||||||
|
public class PluginManager internal constructor(
|
||||||
|
override val context: Context,
|
||||||
|
private val plugins: Set<Plugin>,
|
||||||
|
) : ContextAware, Iterable<Plugin> {
|
||||||
|
|
||||||
|
init {
|
||||||
|
plugins.forEach { it.attach(context) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A [PluginManager] of parent context if it is present
|
||||||
|
*/
|
||||||
|
private val parent: PluginManager? = context.parent?.plugins
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List plugins stored in this [PluginManager]. If [inherit] is true, include parent plugins as well
|
||||||
|
*/
|
||||||
|
public fun list(inherit: Boolean): Collection<Plugin> {
|
||||||
|
return if (inherit && parent != null) {
|
||||||
|
plugins + parent.list(true)
|
||||||
|
} else {
|
||||||
|
plugins
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get existing plugin or return null if not present. Only first matching plugin is returned.
|
||||||
|
* @param inherit search for parent [PluginManager] plugins
|
||||||
|
* @param predicate condition for the plugin
|
||||||
|
*/
|
||||||
|
public fun find(inherit: Boolean = true, predicate: (Plugin) -> Boolean): Plugin? =
|
||||||
|
list(inherit).find(predicate)
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find a loaded plugin via its tag
|
||||||
|
*
|
||||||
|
* @param tag
|
||||||
|
* @return
|
||||||
|
*/
|
||||||
|
public operator fun get(tag: PluginTag, inherit: Boolean = true): Plugin? =
|
||||||
|
find(inherit) { tag.matches(it.tag) }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find a loaded plugin via its class. This method does not check if the result is unique and just returns first
|
||||||
|
* plugin matching the class condition.
|
||||||
|
* For safe search provide a tag since tags are checked on load and plugins with the same tag are not allowed
|
||||||
|
* in the same context.
|
||||||
|
*
|
||||||
|
* @param tag
|
||||||
|
* @param type
|
||||||
|
* @param <T>
|
||||||
|
* @return
|
||||||
|
*/
|
||||||
|
@DFInternal
|
||||||
|
public fun <T : Any> getByType(type: KClass<T>, tag: PluginTag? = null, inherit: Boolean = true): T? =
|
||||||
|
find(inherit) { type.isInstance(it) && (tag == null || tag.matches(it.tag)) }?.let { type.cast(it) }
|
||||||
|
|
||||||
|
@OptIn(DFInternal::class)
|
||||||
|
public inline operator fun <reified T : Any> get(tag: PluginTag? = null, recursive: Boolean = true): T? =
|
||||||
|
getByType(T::class, tag, recursive)
|
||||||
|
|
||||||
|
@OptIn(DFInternal::class)
|
||||||
|
public inline operator fun <reified T : Plugin> get(factory: PluginFactory<T>, recursive: Boolean = true): T? =
|
||||||
|
getByType(T::class, factory.tag, recursive)
|
||||||
|
|
||||||
|
override fun iterator(): Iterator<Plugin> = plugins.iterator()
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fetch a plugin with given meta from the context. If the plugin (with given meta) is already registered, it is returned.
|
||||||
|
* Otherwise, new child context with the plugin is created. In the later case the context could be retrieved from the plugin.
|
||||||
|
*/
|
||||||
|
public inline fun <reified T : Plugin> Context.request(factory: PluginFactory<T>, meta: Meta = Meta.EMPTY): T {
|
||||||
|
val existing = plugins[factory]
|
||||||
|
return if (existing != null && existing.meta == meta) existing
|
||||||
|
else {
|
||||||
|
buildContext(name = this@request.name + factory.tag.name) {
|
||||||
|
plugin(factory, meta)
|
||||||
|
}.plugins[factory]!!
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Deprecated("Replace with request", ReplaceWith("request(factory, meta)"))
|
||||||
|
public inline fun <reified T : Plugin> Context.fetch(factory: PluginFactory<T>, meta: Meta = Meta.EMPTY): T =
|
||||||
|
request(factory, meta)
|
@ -1,7 +1,8 @@
|
|||||||
package hep.dataforge.context
|
package space.kscience.dataforge.context
|
||||||
|
|
||||||
import hep.dataforge.meta.Meta
|
import kotlinx.serialization.Serializable
|
||||||
import hep.dataforge.meta.MetaRepr
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.meta.MetaRepr
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The tag which contains information about name, group and version of some
|
* The tag which contains information about name, group and version of some
|
||||||
@ -9,6 +10,7 @@ import hep.dataforge.meta.MetaRepr
|
|||||||
*
|
*
|
||||||
* @author Alexander Nozik
|
* @author Alexander Nozik
|
||||||
*/
|
*/
|
||||||
|
@Serializable
|
||||||
public data class PluginTag(
|
public data class PluginTag(
|
||||||
val name: String,
|
val name: String,
|
||||||
val group: String = "",
|
val group: String = "",
|
||||||
@ -43,7 +45,7 @@ public data class PluginTag(
|
|||||||
|
|
||||||
public companion object {
|
public companion object {
|
||||||
|
|
||||||
public const val DATAFORGE_GROUP: String = "hep.dataforge"
|
public const val DATAFORGE_GROUP: String = "dataforge"
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Build new PluginTag from standard string representation
|
* Build new PluginTag from standard string representation
|
@ -1,10 +1,10 @@
|
|||||||
package hep.dataforge.context
|
package space.kscience.dataforge.context
|
||||||
|
|
||||||
import hep.dataforge.meta.DFExperimental
|
import space.kscience.dataforge.misc.DFExperimental
|
||||||
import hep.dataforge.names.Name
|
import space.kscience.dataforge.names.Name
|
||||||
import hep.dataforge.names.plus
|
import space.kscience.dataforge.names.plus
|
||||||
import hep.dataforge.provider.Provider
|
import space.kscience.dataforge.provider.Provider
|
||||||
import hep.dataforge.provider.top
|
import space.kscience.dataforge.provider.top
|
||||||
import kotlin.reflect.KClass
|
import kotlin.reflect.KClass
|
||||||
import kotlin.reflect.cast
|
import kotlin.reflect.cast
|
||||||
|
|
||||||
@ -48,8 +48,9 @@ public fun <T : Any> Context.gather(
|
|||||||
putAll(top(target, type))
|
putAll(top(target, type))
|
||||||
plugins.forEach { plugin ->
|
plugins.forEach { plugin ->
|
||||||
plugin.top(target, type).forEach { (name, value) ->
|
plugin.top(target, type).forEach { (name, value) ->
|
||||||
if (containsKey(name)) error("Name conflict during gather. An item with name $name could not be gathered from $plugin because key is already present.")
|
val itemName = plugin.name + name
|
||||||
put(plugin.name + name, value)
|
if (containsKey(itemName)) error("Name conflict during gather. An item with name $name could not be gathered from $plugin because key is already present.")
|
||||||
|
put(itemName, value)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (inherit) {
|
if (inherit) {
|
@ -0,0 +1,35 @@
|
|||||||
|
package space.kscience.dataforge.properties
|
||||||
|
|
||||||
|
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.meta.ObservableMutableMeta
|
||||||
|
import space.kscience.dataforge.meta.transformations.MetaConverter
|
||||||
|
import space.kscience.dataforge.meta.transformations.nullableMetaToObject
|
||||||
|
import space.kscience.dataforge.meta.transformations.nullableObjectToMeta
|
||||||
|
import space.kscience.dataforge.misc.DFExperimental
|
||||||
|
import space.kscience.dataforge.names.Name
|
||||||
|
import space.kscience.dataforge.names.startsWith
|
||||||
|
|
||||||
|
@DFExperimental
|
||||||
|
public class MetaProperty<T : Any>(
|
||||||
|
public val meta: ObservableMutableMeta,
|
||||||
|
public val name: Name,
|
||||||
|
public val converter: MetaConverter<T>,
|
||||||
|
) : Property<T?> {
|
||||||
|
|
||||||
|
override var value: T?
|
||||||
|
get() = converter.nullableMetaToObject(meta[name])
|
||||||
|
set(value) {
|
||||||
|
meta[name] = converter.nullableObjectToMeta(value) ?: Meta.EMPTY
|
||||||
|
}
|
||||||
|
|
||||||
|
override fun onChange(owner: Any?, callback: (T?) -> Unit) {
|
||||||
|
meta.onChange(owner) { name ->
|
||||||
|
if (name.startsWith(this@MetaProperty.name)) callback(converter.nullableMetaToObject(this[name]))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
override fun removeChangeListener(owner: Any?) {
|
||||||
|
meta.removeListener(owner)
|
||||||
|
}
|
||||||
|
}
|
@ -1,10 +1,9 @@
|
|||||||
package hep.dataforge.properties
|
package space.kscience.dataforge.properties
|
||||||
|
|
||||||
import hep.dataforge.meta.DFExperimental
|
|
||||||
import kotlinx.coroutines.CoroutineScope
|
|
||||||
import kotlinx.coroutines.ExperimentalCoroutinesApi
|
import kotlinx.coroutines.ExperimentalCoroutinesApi
|
||||||
import kotlinx.coroutines.flow.MutableStateFlow
|
import kotlinx.coroutines.flow.MutableStateFlow
|
||||||
import kotlinx.coroutines.flow.StateFlow
|
import kotlinx.coroutines.flow.StateFlow
|
||||||
|
import space.kscience.dataforge.misc.DFExperimental
|
||||||
|
|
||||||
@DFExperimental
|
@DFExperimental
|
||||||
public interface Property<T> {
|
public interface Property<T> {
|
||||||
@ -23,12 +22,12 @@ public fun <T> Property<T>.toFlow(): StateFlow<T> = MutableStateFlow(value).also
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Reflect all changes in the [source] property onto this property
|
* Reflect all changes in the [source] property onto this property. Does not reflect changes back.
|
||||||
*
|
*
|
||||||
* @return a mirroring job
|
* @return a mirroring job
|
||||||
*/
|
*/
|
||||||
@DFExperimental
|
@DFExperimental
|
||||||
public fun <T> Property<T>.mirror(source: Property<T>, scope: CoroutineScope) {
|
public fun <T> Property<T>.mirror(source: Property<T>) {
|
||||||
source.onChange(this) {
|
source.onChange(this) {
|
||||||
this.value = it
|
this.value = it
|
||||||
}
|
}
|
@ -0,0 +1,31 @@
|
|||||||
|
package space.kscience.dataforge.properties
|
||||||
|
|
||||||
|
|
||||||
|
import space.kscience.dataforge.meta.Scheme
|
||||||
|
import space.kscience.dataforge.misc.DFExperimental
|
||||||
|
import space.kscience.dataforge.names.parseAsName
|
||||||
|
import space.kscience.dataforge.names.startsWith
|
||||||
|
import kotlin.reflect.KMutableProperty1
|
||||||
|
|
||||||
|
@DFExperimental
|
||||||
|
public fun <S : Scheme, T : Any> S.property(property: KMutableProperty1<S, T?>): Property<T?> =
|
||||||
|
object : Property<T?> {
|
||||||
|
override var value: T?
|
||||||
|
get() = property.get(this@property)
|
||||||
|
set(value) {
|
||||||
|
property.set(this@property, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
override fun onChange(owner: Any?, callback: (T?) -> Unit) {
|
||||||
|
this@property.meta.onChange(this) { name ->
|
||||||
|
if (name.startsWith(property.name.parseAsName(true))) {
|
||||||
|
callback(property.get(this@property))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
override fun removeChangeListener(owner: Any?) {
|
||||||
|
this@property.meta.removeListener(this@property)
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
@ -0,0 +1,92 @@
|
|||||||
|
/*
|
||||||
|
* Copyright 2015 Alexander Nozik.
|
||||||
|
*
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
package space.kscience.dataforge.provider
|
||||||
|
|
||||||
|
import space.kscience.dataforge.names.Name
|
||||||
|
import space.kscience.dataforge.names.parseAsName
|
||||||
|
import kotlin.jvm.JvmInline
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Path interface.
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
@JvmInline
|
||||||
|
public value class Path(public val tokens: List<PathToken>) : Iterable<PathToken> {
|
||||||
|
|
||||||
|
override fun iterator(): Iterator<PathToken> = tokens.iterator()
|
||||||
|
|
||||||
|
override fun toString(): String = tokens.joinToString(separator = PATH_SEGMENT_SEPARATOR)
|
||||||
|
|
||||||
|
public companion object {
|
||||||
|
public const val PATH_SEGMENT_SEPARATOR: String = "/"
|
||||||
|
|
||||||
|
public fun parse(path: String): Path = Path(path.split(PATH_SEGMENT_SEPARATOR).map { PathToken.parse(it) })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public val Path.length: Int get() = tokens.size
|
||||||
|
|
||||||
|
public val Path.head: PathToken? get() = tokens.firstOrNull()
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns non-empty optional containing the chain without first segment in case of chain path.
|
||||||
|
*
|
||||||
|
* @return
|
||||||
|
*/
|
||||||
|
public val Path.tail: Path? get() = if (tokens.isEmpty()) null else Path(tokens.drop(1))
|
||||||
|
|
||||||
|
|
||||||
|
public operator fun Path.plus(path: Path): Path = Path(this.tokens + path.tokens)
|
||||||
|
|
||||||
|
public data class PathToken(val name: Name, val target: String? = null) {
|
||||||
|
override fun toString(): String = if (target == null) {
|
||||||
|
name.toString()
|
||||||
|
} else {
|
||||||
|
"$target$TARGET_SEPARATOR$name"
|
||||||
|
}
|
||||||
|
|
||||||
|
public companion object {
|
||||||
|
public const val TARGET_SEPARATOR: String = "::"
|
||||||
|
|
||||||
|
public fun parse(token: String, cache: Boolean = false): PathToken {
|
||||||
|
val target = token.substringBefore(TARGET_SEPARATOR, "")
|
||||||
|
val name = token.substringAfter(TARGET_SEPARATOR).parseAsName(cache)
|
||||||
|
if (target.contains("[")) TODO("target separators in queries are not supported")
|
||||||
|
return PathToken(name, target)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Represent this path token as full path
|
||||||
|
*/
|
||||||
|
public fun PathToken.asPath(): Path = Path(listOf(this))
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Represent a name with optional [target] as a [Path]
|
||||||
|
*/
|
||||||
|
public fun Name.asPath(target: String? = null): Path = PathToken(this, target).asPath()
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build a path from given names using default targets
|
||||||
|
*/
|
||||||
|
public fun Path(vararg names: Name): Path = Path(names.map { PathToken(it) })
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Use an array of [Name]-target pairs to construct segmented [Path]
|
||||||
|
*/
|
||||||
|
public fun Path(vararg tokens: Pair<Name, String?>): Path = Path(tokens.map { PathToken(it.first, it.second) })
|
@ -13,9 +13,9 @@
|
|||||||
* See the License for the specific language governing permissions and
|
* See the License for the specific language governing permissions and
|
||||||
* limitations under the License.
|
* limitations under the License.
|
||||||
*/
|
*/
|
||||||
package hep.dataforge.provider
|
package space.kscience.dataforge.provider
|
||||||
|
|
||||||
import hep.dataforge.names.Name
|
import space.kscience.dataforge.names.Name
|
||||||
import kotlin.reflect.KClass
|
import kotlin.reflect.KClass
|
||||||
import kotlin.reflect.safeCast
|
import kotlin.reflect.safeCast
|
||||||
|
|
||||||
@ -75,15 +75,13 @@ public inline fun <reified T : Any> Provider.provide(path: String, targetOverrid
|
|||||||
/**
|
/**
|
||||||
* Typed top level content
|
* Typed top level content
|
||||||
*/
|
*/
|
||||||
public fun <T : Any> Provider.top(target: String, type: KClass<out T>): Map<Name, T> {
|
public fun <T : Any> Provider.top(target: String, type: KClass<out T>): Map<Name, T> = content(target).mapValues {
|
||||||
return content(target).mapValues {
|
type.safeCast(it.value) ?: error("The type of element ${it.value} is ${it.value::class} but $type is expected")
|
||||||
type.safeCast(it.value) ?: error("The type of element $it is ${it::class} but $type is expected")
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Typed top level content
|
* Typed top level content
|
||||||
*/
|
*/
|
||||||
public inline fun <reified T : Any> Provider.top(target: String): Map<Name, T> = top(target, T::class)
|
public inline fun <reified T : Any> Provider.top(target: String ): Map<Name, T> = top(target, T::class)
|
||||||
|
|
||||||
|
|
@ -1,8 +1,7 @@
|
|||||||
package hep.dataforge.context
|
package space.kscience.dataforge.context
|
||||||
|
|
||||||
import hep.dataforge.names.Name
|
import space.kscience.dataforge.names.Name
|
||||||
import hep.dataforge.names.appendLeft
|
import space.kscience.dataforge.names.appendFirst
|
||||||
import hep.dataforge.names.toName
|
|
||||||
import kotlin.test.Test
|
import kotlin.test.Test
|
||||||
import kotlin.test.assertEquals
|
import kotlin.test.assertEquals
|
||||||
|
|
||||||
@ -12,8 +11,8 @@ class ContextTest {
|
|||||||
override val tag get() = PluginTag("test")
|
override val tag get() = PluginTag("test")
|
||||||
|
|
||||||
override fun content(target: String): Map<Name, Any> {
|
override fun content(target: String): Map<Name, Any> {
|
||||||
return when(target){
|
return when (target) {
|
||||||
"test" -> listOf("a", "b", "c.d").associate { it.toName() to it.toName() }
|
"test" -> listOf("a", "b", "c.d").associate { Name.parse(it) to Name.parse(it) }
|
||||||
else -> emptyMap()
|
else -> emptyMap()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -21,14 +20,13 @@ class ContextTest {
|
|||||||
|
|
||||||
@Test
|
@Test
|
||||||
fun testPluginManager() {
|
fun testPluginManager() {
|
||||||
val context = Global.context("test"){
|
val context = Context("test") {
|
||||||
plugin(DummyPlugin())
|
plugin(DummyPlugin())
|
||||||
}
|
}
|
||||||
//Global.plugins.load(DummyPlugin())
|
|
||||||
val members = context.gather<Name>("test")
|
val members = context.gather<Name>("test")
|
||||||
assertEquals(3, members.count())
|
assertEquals(3, members.count())
|
||||||
members.forEach {
|
members.forEach {
|
||||||
assertEquals(it.key, it.value.appendLeft("test"))
|
assertEquals(it.key, it.value.appendFirst("test"))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -0,0 +1,28 @@
|
|||||||
|
package space.kscience.dataforge.properties
|
||||||
|
|
||||||
|
import space.kscience.dataforge.meta.Scheme
|
||||||
|
import space.kscience.dataforge.meta.SchemeSpec
|
||||||
|
import space.kscience.dataforge.meta.int
|
||||||
|
import space.kscience.dataforge.misc.DFExperimental
|
||||||
|
import kotlin.test.Test
|
||||||
|
import kotlin.test.assertEquals
|
||||||
|
|
||||||
|
internal class TestScheme : Scheme() {
|
||||||
|
var a by int()
|
||||||
|
var b by int()
|
||||||
|
companion object : SchemeSpec<TestScheme>(::TestScheme)
|
||||||
|
}
|
||||||
|
|
||||||
|
@DFExperimental
|
||||||
|
class MetaPropertiesTest {
|
||||||
|
@Test
|
||||||
|
fun testBinding() {
|
||||||
|
val scheme = TestScheme.empty()
|
||||||
|
val a = scheme.property(TestScheme::a)
|
||||||
|
val b = scheme.property(TestScheme::b)
|
||||||
|
a.bind(b)
|
||||||
|
scheme.a = 2
|
||||||
|
assertEquals(2, scheme.b)
|
||||||
|
assertEquals(2, b.value)
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,14 @@
|
|||||||
|
package space.kscience.dataforge.provider
|
||||||
|
|
||||||
|
import kotlin.test.Test
|
||||||
|
import kotlin.test.assertEquals
|
||||||
|
|
||||||
|
class PathTest {
|
||||||
|
@Test
|
||||||
|
fun testParse(){
|
||||||
|
val nameString = "a.b.c.d"
|
||||||
|
val pathString = "a.b/c.d"
|
||||||
|
assertEquals(1, Path.parse(nameString).length)
|
||||||
|
assertEquals(2, Path.parse(pathString).length)
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,32 @@
|
|||||||
|
package space.kscience.dataforge.context
|
||||||
|
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.names.Name
|
||||||
|
|
||||||
|
public class ConsoleLogManager : AbstractPlugin(), LogManager {
|
||||||
|
|
||||||
|
override fun logger(name: Name): Logger = Logger { tag, body ->
|
||||||
|
val message: String = body.safe
|
||||||
|
when (tag) {
|
||||||
|
// TODO depends on https://youtrack.jetbrains.com/issue/KT-33595/
|
||||||
|
LogManager.DEBUG -> console.asDynamic().debug("[${context.name}] $name: $message")
|
||||||
|
LogManager.INFO -> console.info("[${context.name}] $name: $message")
|
||||||
|
LogManager.WARNING -> console.warn("[${context.name}] $name: $message")
|
||||||
|
LogManager.ERROR -> console.error("[${context.name}] $name: $message")
|
||||||
|
else -> console.log("[${context.name}] $name: $message")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
override val defaultLogger: Logger = logger(Name.EMPTY)
|
||||||
|
|
||||||
|
|
||||||
|
override val tag: PluginTag get() = Companion.tag
|
||||||
|
|
||||||
|
public companion object : PluginFactory<ConsoleLogManager> {
|
||||||
|
override fun build(context: Context, meta: Meta): ConsoleLogManager = ConsoleLogManager()
|
||||||
|
|
||||||
|
override val tag: PluginTag = PluginTag(group = PluginTag.DATAFORGE_GROUP, name = "log.jsConsole")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
internal actual fun getGlobalLoggerFactory(): PluginFactory<out LogManager> = ConsoleLogManager
|
@ -1,10 +1,10 @@
|
|||||||
package hep.dataforge.properties
|
package space.kscience.dataforge.properties
|
||||||
|
|
||||||
import hep.dataforge.meta.DFExperimental
|
|
||||||
import org.w3c.dom.HTMLInputElement
|
import org.w3c.dom.HTMLInputElement
|
||||||
|
import space.kscience.dataforge.misc.DFExperimental
|
||||||
|
|
||||||
@DFExperimental
|
@DFExperimental
|
||||||
fun HTMLInputElement.bindValue(property: Property<String>) {
|
public fun HTMLInputElement.bindValue(property: Property<String>) {
|
||||||
if (this.onchange != null) error("Input element already bound")
|
if (this.onchange != null) error("Input element already bound")
|
||||||
this.onchange = {
|
this.onchange = {
|
||||||
property.value = this.value
|
property.value = this.value
|
||||||
@ -18,7 +18,7 @@ fun HTMLInputElement.bindValue(property: Property<String>) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@DFExperimental
|
@DFExperimental
|
||||||
fun HTMLInputElement.bindChecked(property: Property<Boolean>) {
|
public fun HTMLInputElement.bindChecked(property: Property<Boolean>) {
|
||||||
if (this.onchange != null) error("Input element already bound")
|
if (this.onchange != null) error("Input element already bound")
|
||||||
this.onchange = {
|
this.onchange = {
|
||||||
property.value = this.checked
|
property.value = this.checked
|
@ -1,36 +0,0 @@
|
|||||||
package hep.dataforge.provider
|
|
||||||
|
|
||||||
import hep.dataforge.context.Context
|
|
||||||
import hep.dataforge.context.gather
|
|
||||||
import hep.dataforge.meta.DFExperimental
|
|
||||||
import hep.dataforge.names.Name
|
|
||||||
import kotlin.reflect.KClass
|
|
||||||
import kotlin.reflect.full.findAnnotation
|
|
||||||
|
|
||||||
|
|
||||||
@DFExperimental
|
|
||||||
public val KClass<*>.dfType: String
|
|
||||||
get() = findAnnotation<Type>()?.id ?: simpleName ?: ""
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Provide an object with given name inferring target from its type using [Type] annotation
|
|
||||||
*/
|
|
||||||
@DFExperimental
|
|
||||||
public inline fun <reified T : Any> Provider.provideByType(name: String): T? {
|
|
||||||
val target = T::class.dfType
|
|
||||||
return provide(target, name)
|
|
||||||
}
|
|
||||||
|
|
||||||
@DFExperimental
|
|
||||||
public inline fun <reified T : Any> Provider.top(): Map<Name, T> {
|
|
||||||
val target = T::class.dfType
|
|
||||||
return top(target)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* All objects provided by plugins with given target and type
|
|
||||||
*/
|
|
||||||
@DFExperimental
|
|
||||||
public inline fun <reified T : Any> Context.gather(inherit: Boolean = true): Map<Name, T> =
|
|
||||||
gather<T>(T::class.dfType, inherit)
|
|
||||||
|
|
@ -13,7 +13,7 @@
|
|||||||
* See the License for the specific language governing permissions and
|
* See the License for the specific language governing permissions and
|
||||||
* limitations under the License.
|
* limitations under the License.
|
||||||
*/
|
*/
|
||||||
package hep.dataforge.context
|
package space.kscience.dataforge.context
|
||||||
|
|
||||||
import java.util.*
|
import java.util.*
|
||||||
import kotlin.reflect.KClass
|
import kotlin.reflect.KClass
|
@ -0,0 +1,32 @@
|
|||||||
|
package space.kscience.dataforge.context
|
||||||
|
|
||||||
|
import org.slf4j.LoggerFactory
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.names.Name
|
||||||
|
|
||||||
|
public class SlfLogManager : AbstractPlugin(), LogManager {
|
||||||
|
|
||||||
|
override fun logger(name: Name): Logger = Logger { tag, body ->
|
||||||
|
val logger = LoggerFactory.getLogger("[${context.name}] $name") //KotlinLogging.logger("[${context.name}] $name")
|
||||||
|
val message = body.safe
|
||||||
|
when (tag) {
|
||||||
|
LogManager.DEBUG -> logger.debug(message)
|
||||||
|
LogManager.INFO -> logger.info(message)
|
||||||
|
LogManager.WARNING -> logger.warn(message)
|
||||||
|
LogManager.ERROR -> logger.error(message)
|
||||||
|
else -> logger.trace(message)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
override val defaultLogger: Logger = logger(Name.EMPTY)
|
||||||
|
|
||||||
|
override val tag: PluginTag get() = Companion.tag
|
||||||
|
|
||||||
|
public companion object : PluginFactory<SlfLogManager> {
|
||||||
|
override fun build(context: Context, meta: Meta): SlfLogManager = SlfLogManager()
|
||||||
|
|
||||||
|
override val tag: PluginTag = PluginTag(group = PluginTag.DATAFORGE_GROUP, name = "log.kotlinLogging")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
internal actual fun getGlobalLoggerFactory(): PluginFactory<out LogManager> = SlfLogManager
|
@ -14,38 +14,34 @@
|
|||||||
* limitations under the License.
|
* limitations under the License.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
package hep.dataforge.descriptors
|
package space.kscience.dataforge.descriptors
|
||||||
|
|
||||||
import hep.dataforge.meta.DFExperimental
|
//@MustBeDocumented
|
||||||
import hep.dataforge.values.ValueType
|
//annotation class Attribute(
|
||||||
import kotlin.reflect.KClass
|
// val key: String,
|
||||||
|
// val value: String
|
||||||
@MustBeDocumented
|
//)
|
||||||
annotation class Attribute(
|
//
|
||||||
val key: String,
|
//@MustBeDocumented
|
||||||
val value: String
|
//annotation class Attributes(
|
||||||
)
|
// val attrs: Array<Attribute>
|
||||||
|
//)
|
||||||
@MustBeDocumented
|
//
|
||||||
annotation class Attributes(
|
//@MustBeDocumented
|
||||||
val attrs: Array<Attribute>
|
//annotation class ItemDef(
|
||||||
)
|
// val info: String = "",
|
||||||
|
// val multiple: Boolean = false,
|
||||||
@MustBeDocumented
|
// val required: Boolean = false
|
||||||
annotation class ItemDef(
|
//)
|
||||||
val info: String = "",
|
//
|
||||||
val multiple: Boolean = false,
|
//@Target(AnnotationTarget.PROPERTY)
|
||||||
val required: Boolean = false
|
//@MustBeDocumented
|
||||||
)
|
//annotation class ValueDef(
|
||||||
|
// val type: Array<ValueType> = [ValueType.STRING],
|
||||||
@Target(AnnotationTarget.PROPERTY)
|
// val def: String = "",
|
||||||
@MustBeDocumented
|
// val allowed: Array<String> = [],
|
||||||
annotation class ValueDef(
|
// val enumeration: KClass<*> = Any::class
|
||||||
val type: Array<ValueType> = [ValueType.STRING],
|
//)
|
||||||
val def: String = "",
|
|
||||||
val allowed: Array<String> = [],
|
|
||||||
val enumeration: KClass<*> = Any::class
|
|
||||||
)
|
|
||||||
|
|
||||||
///**
|
///**
|
||||||
// * Description text for meta property, node or whole object
|
// * Description text for meta property, node or whole object
|
@ -1,4 +1,4 @@
|
|||||||
package hep.dataforge.descriptors
|
package space.kscience.dataforge.descriptors
|
||||||
|
|
||||||
|
|
||||||
//inline fun <reified T : Scheme> T.buildDescriptor(): NodeDescriptor = NodeDescriptor {
|
//inline fun <reified T : Scheme> T.buildDescriptor(): NodeDescriptor = NodeDescriptor {
|
@ -0,0 +1,49 @@
|
|||||||
|
package space.kscience.dataforge.provider
|
||||||
|
|
||||||
|
import space.kscience.dataforge.context.Context
|
||||||
|
import space.kscience.dataforge.context.PluginBuilder
|
||||||
|
import space.kscience.dataforge.context.gather
|
||||||
|
import space.kscience.dataforge.misc.DFExperimental
|
||||||
|
import space.kscience.dataforge.misc.DfId
|
||||||
|
import space.kscience.dataforge.misc.Named
|
||||||
|
import space.kscience.dataforge.names.Name
|
||||||
|
import kotlin.reflect.KClass
|
||||||
|
import kotlin.reflect.full.findAnnotation
|
||||||
|
|
||||||
|
|
||||||
|
@DFExperimental
|
||||||
|
public val KClass<*>.dfId: String
|
||||||
|
get() = findAnnotation<DfId>()?.id ?: simpleName ?: ""
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Provide an object with given name inferring target from its type using [DfId] annotation
|
||||||
|
*/
|
||||||
|
@DFExperimental
|
||||||
|
public inline fun <reified T : Any> Provider.provideByType(name: String): T? {
|
||||||
|
val target = T::class.dfId
|
||||||
|
return provide(target, name)
|
||||||
|
}
|
||||||
|
|
||||||
|
@DFExperimental
|
||||||
|
public inline fun <reified T : Any> Provider.top(): Map<Name, T> {
|
||||||
|
val target = T::class.dfId
|
||||||
|
return top(target)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* All objects provided by plugins with given target and type
|
||||||
|
*/
|
||||||
|
@DFExperimental
|
||||||
|
public inline fun <reified T : Any> Context.gather(inherit: Boolean = true): Map<Name, T> =
|
||||||
|
gather<T>(T::class.dfId, inherit)
|
||||||
|
|
||||||
|
|
||||||
|
@DFExperimental
|
||||||
|
public inline fun <reified T : Any> PluginBuilder.provides(items: Map<Name, T>) {
|
||||||
|
provides(T::class.dfId, items)
|
||||||
|
}
|
||||||
|
|
||||||
|
@DFExperimental
|
||||||
|
public inline fun <reified T : Any> PluginBuilder.provides(vararg items: Named) {
|
||||||
|
provides(T::class.dfId, *items)
|
||||||
|
}
|
@ -0,0 +1,4 @@
|
|||||||
|
package space.kscience.dataforge.context
|
||||||
|
|
||||||
|
|
||||||
|
internal actual fun getGlobalLoggerFactory(): PluginFactory<out LogManager> = DefaultLogManager
|
23
dataforge-data/README.md
Normal file
23
dataforge-data/README.md
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
# Module dataforge-data
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
## Artifact:
|
||||||
|
|
||||||
|
The Maven coordinates of this project are `space.kscience:dataforge-data:0.7.0`.
|
||||||
|
|
||||||
|
**Gradle Kotlin DSL:**
|
||||||
|
```kotlin
|
||||||
|
repositories {
|
||||||
|
maven("https://repo.kotlin.link")
|
||||||
|
//uncomment to access development builds
|
||||||
|
//maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
|
||||||
|
mavenCentral()
|
||||||
|
}
|
||||||
|
|
||||||
|
dependencies {
|
||||||
|
implementation("space.kscience:dataforge-data:0.7.0")
|
||||||
|
}
|
||||||
|
```
|
@ -26,6 +26,24 @@ public final class hep/dataforge/data/ActionKt {
|
|||||||
public static final fun then (Lhep/dataforge/data/Action;Lhep/dataforge/data/Action;)Lhep/dataforge/data/Action;
|
public static final fun then (Lhep/dataforge/data/Action;Lhep/dataforge/data/Action;)Lhep/dataforge/data/Action;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public final class hep/dataforge/data/ComputationData : hep/dataforge/data/ComputationGoal, hep/dataforge/data/Data {
|
||||||
|
public fun <init> (Lkotlin/reflect/KClass;Lhep/dataforge/meta/Meta;Lkotlin/coroutines/CoroutineContext;Ljava/util/Collection;Lkotlin/jvm/functions/Function2;)V
|
||||||
|
public synthetic fun <init> (Lkotlin/reflect/KClass;Lhep/dataforge/meta/Meta;Lkotlin/coroutines/CoroutineContext;Ljava/util/Collection;Lkotlin/jvm/functions/Function2;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
|
||||||
|
public fun getMeta ()Lhep/dataforge/meta/Meta;
|
||||||
|
public fun getType ()Lkotlin/reflect/KClass;
|
||||||
|
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||||
|
}
|
||||||
|
|
||||||
|
public class hep/dataforge/data/ComputationGoal : hep/dataforge/data/Goal {
|
||||||
|
public fun <init> (Lkotlin/coroutines/CoroutineContext;Ljava/util/Collection;Lkotlin/jvm/functions/Function2;)V
|
||||||
|
public synthetic fun <init> (Lkotlin/coroutines/CoroutineContext;Ljava/util/Collection;Lkotlin/jvm/functions/Function2;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
|
||||||
|
public final fun getBlock ()Lkotlin/jvm/functions/Function2;
|
||||||
|
public fun getDependencies ()Ljava/util/Collection;
|
||||||
|
public final fun getResult ()Lkotlinx/coroutines/Deferred;
|
||||||
|
public fun reset ()V
|
||||||
|
public fun startAsync (Lkotlinx/coroutines/CoroutineScope;)Lkotlinx/coroutines/Deferred;
|
||||||
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/data/CoroutineMonitor : kotlin/coroutines/CoroutineContext$Element {
|
public final class hep/dataforge/data/CoroutineMonitor : kotlin/coroutines/CoroutineContext$Element {
|
||||||
public static final field Companion Lhep/dataforge/data/CoroutineMonitor$Companion;
|
public static final field Companion Lhep/dataforge/data/CoroutineMonitor$Companion;
|
||||||
public fun <init> ()V
|
public fun <init> ()V
|
||||||
@ -79,14 +97,6 @@ public final class hep/dataforge/data/Data$DefaultImpls {
|
|||||||
public static fun toMeta (Lhep/dataforge/data/Data;)Lhep/dataforge/meta/Meta;
|
public static fun toMeta (Lhep/dataforge/data/Data;)Lhep/dataforge/meta/Meta;
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/data/DataCastKt {
|
|
||||||
public static final fun canCast (Lhep/dataforge/data/DataItem;Lkotlin/reflect/KClass;)Z
|
|
||||||
public static final fun cast (Lhep/dataforge/data/Data;Lkotlin/reflect/KClass;)Lhep/dataforge/data/Data;
|
|
||||||
public static final fun cast (Lhep/dataforge/data/DataNode;Lkotlin/reflect/KClass;)Lhep/dataforge/data/DataNode;
|
|
||||||
public static final fun ensureType (Lhep/dataforge/data/DataNode;Lkotlin/reflect/KClass;)V
|
|
||||||
public static final fun upcast (Lhep/dataforge/data/Data;Lkotlin/reflect/KClass;)Lhep/dataforge/data/Data;
|
|
||||||
}
|
|
||||||
|
|
||||||
public final class hep/dataforge/data/DataFilter : hep/dataforge/meta/Scheme {
|
public final class hep/dataforge/data/DataFilter : hep/dataforge/meta/Scheme {
|
||||||
public static final field Companion Lhep/dataforge/data/DataFilter$Companion;
|
public static final field Companion Lhep/dataforge/data/DataFilter$Companion;
|
||||||
public fun <init> ()V
|
public fun <init> ()V
|
||||||
@ -129,17 +139,22 @@ public final class hep/dataforge/data/DataItem$Node : hep/dataforge/data/DataIte
|
|||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/data/DataJVMKt {
|
public final class hep/dataforge/data/DataJVMKt {
|
||||||
|
public static final fun canCast (Lhep/dataforge/data/DataItem;Lkotlin/reflect/KClass;)Z
|
||||||
|
public static final fun cast (Lhep/dataforge/data/Data;Lkotlin/reflect/KClass;)Lhep/dataforge/data/Data;
|
||||||
|
public static final fun cast (Lhep/dataforge/data/DataNode;Lkotlin/reflect/KClass;)Lhep/dataforge/data/DataNode;
|
||||||
|
public static final fun ensureType (Lhep/dataforge/data/DataNode;Lkotlin/reflect/KClass;)V
|
||||||
public static final fun filterIsInstance (Lhep/dataforge/data/Data;Lkotlin/reflect/KClass;)Lhep/dataforge/data/Data;
|
public static final fun filterIsInstance (Lhep/dataforge/data/Data;Lkotlin/reflect/KClass;)Lhep/dataforge/data/Data;
|
||||||
public static final fun filterIsInstance (Lhep/dataforge/data/DataItem;Lkotlin/reflect/KClass;)Lhep/dataforge/data/DataItem;
|
public static final fun filterIsInstance (Lhep/dataforge/data/DataItem;Lkotlin/reflect/KClass;)Lhep/dataforge/data/DataItem;
|
||||||
public static final fun filterIsInstance (Lhep/dataforge/data/DataNode;Lkotlin/reflect/KClass;)Lhep/dataforge/data/DataNode;
|
public static final fun filterIsInstance (Lhep/dataforge/data/DataNode;Lkotlin/reflect/KClass;)Lhep/dataforge/data/DataNode;
|
||||||
public static final fun get (Lhep/dataforge/data/Data;)Ljava/lang/Object;
|
public static final fun get (Lhep/dataforge/data/Data;)Ljava/lang/Object;
|
||||||
|
public static final fun upcast (Lhep/dataforge/data/Data;Lkotlin/reflect/KClass;)Lhep/dataforge/data/Data;
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/data/DataKt {
|
public final class hep/dataforge/data/DataKt {
|
||||||
public static final fun map (Lhep/dataforge/data/Data;Lkotlin/reflect/KClass;Lkotlin/coroutines/CoroutineContext;Lhep/dataforge/meta/Meta;Lkotlin/jvm/functions/Function3;)Lhep/dataforge/data/Data;
|
public static final fun map (Lhep/dataforge/data/Data;Lkotlin/reflect/KClass;Lkotlin/coroutines/CoroutineContext;Lhep/dataforge/meta/Meta;Lkotlin/jvm/functions/Function3;)Lhep/dataforge/data/Data;
|
||||||
public static synthetic fun map$default (Lhep/dataforge/data/Data;Lkotlin/reflect/KClass;Lkotlin/coroutines/CoroutineContext;Lhep/dataforge/meta/Meta;Lkotlin/jvm/functions/Function3;ILjava/lang/Object;)Lhep/dataforge/data/Data;
|
public static synthetic fun map$default (Lhep/dataforge/data/Data;Lkotlin/reflect/KClass;Lkotlin/coroutines/CoroutineContext;Lhep/dataforge/meta/Meta;Lkotlin/jvm/functions/Function3;ILjava/lang/Object;)Lhep/dataforge/data/Data;
|
||||||
public static final fun reduce (Ljava/util/Map;Lkotlin/reflect/KClass;Lkotlin/coroutines/CoroutineContext;Lhep/dataforge/meta/Meta;Lkotlin/jvm/functions/Function3;)Lhep/dataforge/data/DynamicData;
|
public static final fun reduce (Ljava/util/Map;Lkotlin/reflect/KClass;Lkotlin/coroutines/CoroutineContext;Lhep/dataforge/meta/Meta;Lkotlin/jvm/functions/Function3;)Lhep/dataforge/data/ComputationData;
|
||||||
public static synthetic fun reduce$default (Ljava/util/Map;Lkotlin/reflect/KClass;Lkotlin/coroutines/CoroutineContext;Lhep/dataforge/meta/Meta;Lkotlin/jvm/functions/Function3;ILjava/lang/Object;)Lhep/dataforge/data/DynamicData;
|
public static synthetic fun reduce$default (Ljava/util/Map;Lkotlin/reflect/KClass;Lkotlin/coroutines/CoroutineContext;Lhep/dataforge/meta/Meta;Lkotlin/jvm/functions/Function3;ILjava/lang/Object;)Lhep/dataforge/data/ComputationData;
|
||||||
}
|
}
|
||||||
|
|
||||||
public abstract interface class hep/dataforge/data/DataNode : hep/dataforge/meta/MetaRepr {
|
public abstract interface class hep/dataforge/data/DataNode : hep/dataforge/meta/MetaRepr {
|
||||||
@ -148,50 +163,36 @@ public abstract interface class hep/dataforge/data/DataNode : hep/dataforge/meta
|
|||||||
public abstract fun getItems ()Ljava/util/Map;
|
public abstract fun getItems ()Ljava/util/Map;
|
||||||
public abstract fun getMeta ()Lhep/dataforge/meta/Meta;
|
public abstract fun getMeta ()Lhep/dataforge/meta/Meta;
|
||||||
public abstract fun getType ()Lkotlin/reflect/KClass;
|
public abstract fun getType ()Lkotlin/reflect/KClass;
|
||||||
public abstract fun startAll (Lkotlinx/coroutines/CoroutineScope;)Lkotlinx/coroutines/Job;
|
|
||||||
public abstract fun toMeta ()Lhep/dataforge/meta/Meta;
|
public abstract fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/data/DataNode$Companion {
|
public final class hep/dataforge/data/DataNode$Companion {
|
||||||
public static final field TYPE Ljava/lang/String;
|
public static final field TYPE Ljava/lang/String;
|
||||||
public final fun builder (Lkotlin/reflect/KClass;)Lhep/dataforge/data/DataTreeBuilder;
|
public final fun builder (Lkotlin/reflect/KClass;)Lhep/dataforge/data/DataTreeBuilder;
|
||||||
public final fun invoke (Lkotlin/reflect/KClass;Lkotlin/jvm/functions/Function1;)Lhep/dataforge/data/DataTree;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/data/DataNode$DefaultImpls {
|
public final class hep/dataforge/data/DataNode$DefaultImpls {
|
||||||
public static fun startAll (Lhep/dataforge/data/DataNode;Lkotlinx/coroutines/CoroutineScope;)Lkotlinx/coroutines/Job;
|
|
||||||
public static fun toMeta (Lhep/dataforge/data/DataNode;)Lhep/dataforge/meta/Meta;
|
public static fun toMeta (Lhep/dataforge/data/DataNode;)Lhep/dataforge/meta/Meta;
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/data/DataNodeKt {
|
public final class hep/dataforge/data/DataNodeKt {
|
||||||
public static final fun asSequence (Lhep/dataforge/data/DataNode;)Lkotlin/sequences/Sequence;
|
|
||||||
public static final fun builder (Lhep/dataforge/data/DataNode;)Lhep/dataforge/data/DataTreeBuilder;
|
|
||||||
public static final fun dataSequence (Lhep/dataforge/data/DataNode;)Lkotlin/sequences/Sequence;
|
public static final fun dataSequence (Lhep/dataforge/data/DataNode;)Lkotlin/sequences/Sequence;
|
||||||
public static final fun datum (Lhep/dataforge/data/DataTreeBuilder;Lhep/dataforge/names/Name;Lhep/dataforge/data/Data;)V
|
|
||||||
public static final fun datum (Lhep/dataforge/data/DataTreeBuilder;Ljava/lang/String;Lhep/dataforge/data/Data;)V
|
|
||||||
public static final fun filter (Lhep/dataforge/data/DataNode;Lkotlin/jvm/functions/Function2;)Lhep/dataforge/data/DataNode;
|
public static final fun filter (Lhep/dataforge/data/DataNode;Lkotlin/jvm/functions/Function2;)Lhep/dataforge/data/DataNode;
|
||||||
public static final fun first (Lhep/dataforge/data/DataNode;)Lhep/dataforge/data/Data;
|
public static final fun first (Lhep/dataforge/data/DataNode;)Lhep/dataforge/data/Data;
|
||||||
public static final fun get (Lhep/dataforge/data/DataNode;Lhep/dataforge/names/Name;)Lhep/dataforge/data/DataItem;
|
public static final fun get (Lhep/dataforge/data/DataNode;Lhep/dataforge/names/Name;)Lhep/dataforge/data/DataItem;
|
||||||
public static final fun get (Lhep/dataforge/data/DataNode;Ljava/lang/String;)Lhep/dataforge/data/DataItem;
|
public static final fun get (Lhep/dataforge/data/DataNode;Ljava/lang/String;)Lhep/dataforge/data/DataItem;
|
||||||
public static final fun getData (Lhep/dataforge/data/DataItem;)Lhep/dataforge/data/Data;
|
public static final fun getData (Lhep/dataforge/data/DataItem;)Lhep/dataforge/data/Data;
|
||||||
public static final fun getNode (Lhep/dataforge/data/DataItem;)Lhep/dataforge/data/DataNode;
|
public static final fun getNode (Lhep/dataforge/data/DataItem;)Lhep/dataforge/data/DataNode;
|
||||||
|
public static final fun itemSequence (Lhep/dataforge/data/DataNode;)Lkotlin/sequences/Sequence;
|
||||||
public static final fun iterator (Lhep/dataforge/data/DataNode;)Ljava/util/Iterator;
|
public static final fun iterator (Lhep/dataforge/data/DataNode;)Ljava/util/Iterator;
|
||||||
public static final fun join (Lhep/dataforge/data/DataNode;Lkotlin/coroutines/Continuation;)Ljava/lang/Object;
|
public static final fun join (Lhep/dataforge/data/DataNode;Lkotlin/coroutines/Continuation;)Ljava/lang/Object;
|
||||||
public static final fun node (Lhep/dataforge/data/DataTreeBuilder;Lhep/dataforge/names/Name;Lhep/dataforge/data/DataNode;)V
|
public static final fun startAll (Lhep/dataforge/data/DataNode;Lkotlinx/coroutines/CoroutineScope;)Lkotlinx/coroutines/Job;
|
||||||
public static final fun node (Lhep/dataforge/data/DataTreeBuilder;Ljava/lang/String;Lhep/dataforge/data/DataNode;)V
|
|
||||||
public static final fun static (Lhep/dataforge/data/DataTreeBuilder;Lhep/dataforge/names/Name;Ljava/lang/Object;Lhep/dataforge/meta/Meta;)V
|
|
||||||
public static final fun static (Lhep/dataforge/data/DataTreeBuilder;Lhep/dataforge/names/Name;Ljava/lang/Object;Lkotlin/jvm/functions/Function1;)V
|
|
||||||
public static final fun static (Lhep/dataforge/data/DataTreeBuilder;Ljava/lang/String;Ljava/lang/Object;Lkotlin/jvm/functions/Function1;)V
|
|
||||||
public static synthetic fun static$default (Lhep/dataforge/data/DataTreeBuilder;Lhep/dataforge/names/Name;Ljava/lang/Object;Lhep/dataforge/meta/Meta;ILjava/lang/Object;)V
|
|
||||||
public static synthetic fun static$default (Lhep/dataforge/data/DataTreeBuilder;Lhep/dataforge/names/Name;Ljava/lang/Object;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
|
|
||||||
public static synthetic fun static$default (Lhep/dataforge/data/DataTreeBuilder;Ljava/lang/String;Ljava/lang/Object;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/data/DataTree : hep/dataforge/data/DataNode {
|
public final class hep/dataforge/data/DataTree : hep/dataforge/data/DataNode {
|
||||||
public fun getItems ()Ljava/util/Map;
|
public fun getItems ()Ljava/util/Map;
|
||||||
public fun getMeta ()Lhep/dataforge/meta/Meta;
|
public fun getMeta ()Lhep/dataforge/meta/Meta;
|
||||||
public fun getType ()Lkotlin/reflect/KClass;
|
public fun getType ()Lkotlin/reflect/KClass;
|
||||||
public fun startAll (Lkotlinx/coroutines/CoroutineScope;)Lkotlinx/coroutines/Job;
|
|
||||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -214,6 +215,21 @@ public final class hep/dataforge/data/DataTreeBuilder {
|
|||||||
public final fun update (Lhep/dataforge/data/DataNode;)V
|
public final fun update (Lhep/dataforge/data/DataNode;)V
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public final class hep/dataforge/data/DataTreeBuilderKt {
|
||||||
|
public static final fun DataTree (Lkotlin/reflect/KClass;Lkotlin/jvm/functions/Function1;)Lhep/dataforge/data/DataTree;
|
||||||
|
public static final fun builder (Lhep/dataforge/data/DataNode;)Lhep/dataforge/data/DataTreeBuilder;
|
||||||
|
public static final fun datum (Lhep/dataforge/data/DataTreeBuilder;Lhep/dataforge/names/Name;Lhep/dataforge/data/Data;)V
|
||||||
|
public static final fun datum (Lhep/dataforge/data/DataTreeBuilder;Ljava/lang/String;Lhep/dataforge/data/Data;)V
|
||||||
|
public static final fun node (Lhep/dataforge/data/DataTreeBuilder;Lhep/dataforge/names/Name;Lhep/dataforge/data/DataNode;)V
|
||||||
|
public static final fun node (Lhep/dataforge/data/DataTreeBuilder;Ljava/lang/String;Lhep/dataforge/data/DataNode;)V
|
||||||
|
public static final fun static (Lhep/dataforge/data/DataTreeBuilder;Lhep/dataforge/names/Name;Ljava/lang/Object;Lhep/dataforge/meta/Meta;)V
|
||||||
|
public static final fun static (Lhep/dataforge/data/DataTreeBuilder;Lhep/dataforge/names/Name;Ljava/lang/Object;Lkotlin/jvm/functions/Function1;)V
|
||||||
|
public static final fun static (Lhep/dataforge/data/DataTreeBuilder;Ljava/lang/String;Ljava/lang/Object;Lkotlin/jvm/functions/Function1;)V
|
||||||
|
public static synthetic fun static$default (Lhep/dataforge/data/DataTreeBuilder;Lhep/dataforge/names/Name;Ljava/lang/Object;Lhep/dataforge/meta/Meta;ILjava/lang/Object;)V
|
||||||
|
public static synthetic fun static$default (Lhep/dataforge/data/DataTreeBuilder;Lhep/dataforge/names/Name;Ljava/lang/Object;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
|
||||||
|
public static synthetic fun static$default (Lhep/dataforge/data/DataTreeBuilder;Ljava/lang/String;Ljava/lang/Object;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
|
||||||
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/data/Dependencies : kotlin/coroutines/CoroutineContext$Element {
|
public final class hep/dataforge/data/Dependencies : kotlin/coroutines/CoroutineContext$Element {
|
||||||
public static final field Companion Lhep/dataforge/data/Dependencies$Companion;
|
public static final field Companion Lhep/dataforge/data/Dependencies$Companion;
|
||||||
public fun <init> (Ljava/util/Collection;)V
|
public fun <init> (Ljava/util/Collection;)V
|
||||||
@ -228,24 +244,6 @@ public final class hep/dataforge/data/Dependencies : kotlin/coroutines/Coroutine
|
|||||||
public final class hep/dataforge/data/Dependencies$Companion : kotlin/coroutines/CoroutineContext$Key {
|
public final class hep/dataforge/data/Dependencies$Companion : kotlin/coroutines/CoroutineContext$Key {
|
||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/data/DynamicData : hep/dataforge/data/DynamicGoal, hep/dataforge/data/Data {
|
|
||||||
public fun <init> (Lkotlin/reflect/KClass;Lhep/dataforge/meta/Meta;Lkotlin/coroutines/CoroutineContext;Ljava/util/Collection;Lkotlin/jvm/functions/Function2;)V
|
|
||||||
public synthetic fun <init> (Lkotlin/reflect/KClass;Lhep/dataforge/meta/Meta;Lkotlin/coroutines/CoroutineContext;Ljava/util/Collection;Lkotlin/jvm/functions/Function2;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
|
|
||||||
public fun getMeta ()Lhep/dataforge/meta/Meta;
|
|
||||||
public fun getType ()Lkotlin/reflect/KClass;
|
|
||||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
|
||||||
}
|
|
||||||
|
|
||||||
public class hep/dataforge/data/DynamicGoal : hep/dataforge/data/Goal {
|
|
||||||
public fun <init> (Lkotlin/coroutines/CoroutineContext;Ljava/util/Collection;Lkotlin/jvm/functions/Function2;)V
|
|
||||||
public synthetic fun <init> (Lkotlin/coroutines/CoroutineContext;Ljava/util/Collection;Lkotlin/jvm/functions/Function2;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
|
|
||||||
public final fun getBlock ()Lkotlin/jvm/functions/Function2;
|
|
||||||
public fun getDependencies ()Ljava/util/Collection;
|
|
||||||
public final fun getResult ()Lkotlinx/coroutines/Deferred;
|
|
||||||
public fun reset ()V
|
|
||||||
public fun startAsync (Lkotlinx/coroutines/CoroutineScope;)Lkotlinx/coroutines/Deferred;
|
|
||||||
}
|
|
||||||
|
|
||||||
public final class hep/dataforge/data/FragmentRule {
|
public final class hep/dataforge/data/FragmentRule {
|
||||||
public field result Lkotlin/jvm/functions/Function2;
|
public field result Lkotlin/jvm/functions/Function2;
|
||||||
public fun <init> (Lhep/dataforge/names/Name;Lhep/dataforge/meta/MetaBuilder;)V
|
public fun <init> (Lhep/dataforge/names/Name;Lhep/dataforge/meta/MetaBuilder;)V
|
||||||
@ -302,9 +300,7 @@ public final class hep/dataforge/data/JoinGroup {
|
|||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/data/MapAction : hep/dataforge/data/Action {
|
public final class hep/dataforge/data/MapAction : hep/dataforge/data/Action {
|
||||||
public fun <init> (Lkotlin/reflect/KClass;Lkotlin/reflect/KClass;Lkotlin/jvm/functions/Function1;)V
|
public fun <init> (Lkotlin/reflect/KClass;Lkotlin/jvm/functions/Function1;)V
|
||||||
public final fun getInputType ()Lkotlin/reflect/KClass;
|
|
||||||
public final fun getOutputType ()Lkotlin/reflect/KClass;
|
|
||||||
public fun invoke (Lhep/dataforge/data/DataNode;Lhep/dataforge/meta/Meta;)Lhep/dataforge/data/DataNode;
|
public fun invoke (Lhep/dataforge/data/DataNode;Lhep/dataforge/meta/Meta;)Lhep/dataforge/data/DataNode;
|
||||||
public fun isTerminal ()Z
|
public fun isTerminal ()Z
|
||||||
}
|
}
|
||||||
@ -335,9 +331,7 @@ public final class hep/dataforge/data/NamedData : hep/dataforge/data/Data {
|
|||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/data/ReduceAction : hep/dataforge/data/Action {
|
public final class hep/dataforge/data/ReduceAction : hep/dataforge/data/Action {
|
||||||
public fun <init> (Lkotlin/reflect/KClass;Lkotlin/reflect/KClass;Lkotlin/jvm/functions/Function1;)V
|
public fun <init> (Lkotlin/reflect/KClass;Lkotlin/jvm/functions/Function1;)V
|
||||||
public final fun getInputType ()Lkotlin/reflect/KClass;
|
|
||||||
public final fun getOutputType ()Lkotlin/reflect/KClass;
|
|
||||||
public fun invoke (Lhep/dataforge/data/DataNode;Lhep/dataforge/meta/Meta;)Lhep/dataforge/data/DataNode;
|
public fun invoke (Lhep/dataforge/data/DataNode;Lhep/dataforge/meta/Meta;)Lhep/dataforge/data/DataNode;
|
||||||
public fun isTerminal ()Z
|
public fun isTerminal ()Z
|
||||||
}
|
}
|
||||||
@ -357,9 +351,7 @@ public final class hep/dataforge/data/ReduceGroupBuilder {
|
|||||||
}
|
}
|
||||||
|
|
||||||
public final class hep/dataforge/data/SplitAction : hep/dataforge/data/Action {
|
public final class hep/dataforge/data/SplitAction : hep/dataforge/data/Action {
|
||||||
public fun <init> (Lkotlin/reflect/KClass;Lkotlin/reflect/KClass;Lkotlin/jvm/functions/Function1;)V
|
public fun <init> (Lkotlin/reflect/KClass;Lkotlin/jvm/functions/Function1;)V
|
||||||
public final fun getInputType ()Lkotlin/reflect/KClass;
|
|
||||||
public final fun getOutputType ()Lkotlin/reflect/KClass;
|
|
||||||
public fun invoke (Lhep/dataforge/data/DataNode;Lhep/dataforge/meta/Meta;)Lhep/dataforge/data/DataNode;
|
public fun invoke (Lhep/dataforge/data/DataNode;Lhep/dataforge/meta/Meta;)Lhep/dataforge/data/DataNode;
|
||||||
public fun isTerminal ()Z
|
public fun isTerminal ()Z
|
||||||
}
|
}
|
||||||
@ -394,7 +386,6 @@ public final class hep/dataforge/data/TypeFilteredDataNode : hep/dataforge/data/
|
|||||||
public fun getMeta ()Lhep/dataforge/meta/Meta;
|
public fun getMeta ()Lhep/dataforge/meta/Meta;
|
||||||
public final fun getOrigin ()Lhep/dataforge/data/DataNode;
|
public final fun getOrigin ()Lhep/dataforge/data/DataNode;
|
||||||
public fun getType ()Lkotlin/reflect/KClass;
|
public fun getType ()Lkotlin/reflect/KClass;
|
||||||
public fun startAll (Lkotlinx/coroutines/CoroutineScope;)Lkotlinx/coroutines/Job;
|
|
||||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1,23 +1,18 @@
|
|||||||
plugins {
|
plugins {
|
||||||
id("ru.mipt.npm.mpp")
|
id("space.kscience.gradle.mpp")
|
||||||
id("ru.mipt.npm.native")
|
|
||||||
}
|
}
|
||||||
|
|
||||||
kscience{
|
kscience{
|
||||||
|
jvm()
|
||||||
|
js()
|
||||||
|
native()
|
||||||
useCoroutines()
|
useCoroutines()
|
||||||
|
dependencies {
|
||||||
|
api(project(":dataforge-meta"))
|
||||||
|
api(kotlin("reflect"))
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
kotlin {
|
readme{
|
||||||
sourceSets {
|
maturity = space.kscience.gradle.Maturity.EXPERIMENTAL
|
||||||
commonMain{
|
}
|
||||||
dependencies {
|
|
||||||
api(project(":dataforge-meta"))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
jvmMain{
|
|
||||||
dependencies{
|
|
||||||
api(kotlin("reflect"))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
@ -1,35 +0,0 @@
|
|||||||
package hep.dataforge.data
|
|
||||||
|
|
||||||
import hep.dataforge.meta.Meta
|
|
||||||
|
|
||||||
/**
|
|
||||||
* A simple data transformation on a data node
|
|
||||||
*/
|
|
||||||
public interface Action<in T : Any, out R : Any> {
|
|
||||||
/**
|
|
||||||
* Transform the data in the node, producing a new node. By default it is assumed that all calculations are lazy
|
|
||||||
* so not actual computation is started at this moment
|
|
||||||
*/
|
|
||||||
public operator fun invoke(node: DataNode<T>, meta: Meta): DataNode<R>
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Terminal action is the one that could not be invoked lazily and requires some kind of blocking computation to invoke
|
|
||||||
*/
|
|
||||||
public val isTerminal: Boolean get() = false
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Action composition. The result is terminal if one of its parts is terminal
|
|
||||||
*/
|
|
||||||
public infix fun <T : Any, I : Any, R : Any> Action<T, I>.then(action: Action<I, R>): Action<T, R> {
|
|
||||||
// TODO introduce composite action and add optimize by adding action to the list
|
|
||||||
return object : Action<T, R> {
|
|
||||||
override fun invoke(node: DataNode<T>, meta: Meta): DataNode<R> {
|
|
||||||
return action(this@then.invoke(node, meta), meta)
|
|
||||||
}
|
|
||||||
|
|
||||||
override val isTerminal: Boolean
|
|
||||||
get() = this@then.isTerminal || action.isTerminal
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -1,162 +0,0 @@
|
|||||||
package hep.dataforge.data
|
|
||||||
|
|
||||||
import hep.dataforge.meta.Meta
|
|
||||||
import hep.dataforge.meta.MetaRepr
|
|
||||||
import hep.dataforge.meta.isEmpty
|
|
||||||
import kotlinx.coroutines.CoroutineScope
|
|
||||||
import kotlin.coroutines.CoroutineContext
|
|
||||||
import kotlin.coroutines.EmptyCoroutineContext
|
|
||||||
import kotlin.reflect.KClass
|
|
||||||
|
|
||||||
/**
|
|
||||||
* A data element characterized by its meta
|
|
||||||
*/
|
|
||||||
public interface Data<out T : Any> : Goal<T>, MetaRepr{
|
|
||||||
/**
|
|
||||||
* Type marker for the data. The type is known before the calculation takes place so it could be checked.
|
|
||||||
*/
|
|
||||||
public val type: KClass<out T>
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Meta for the data
|
|
||||||
*/
|
|
||||||
public val meta: Meta
|
|
||||||
|
|
||||||
override fun toMeta(): Meta = Meta {
|
|
||||||
"type" put (type.simpleName?:"undefined")
|
|
||||||
if(!meta.isEmpty()) {
|
|
||||||
"meta" put meta
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public companion object {
|
|
||||||
public const val TYPE: String = "data"
|
|
||||||
|
|
||||||
public operator fun <T : Any> invoke(
|
|
||||||
type: KClass<out T>,
|
|
||||||
meta: Meta = Meta.EMPTY,
|
|
||||||
context: CoroutineContext = EmptyCoroutineContext,
|
|
||||||
dependencies: Collection<Data<*>> = emptyList(),
|
|
||||||
block: suspend CoroutineScope.() -> T
|
|
||||||
): Data<T> = DynamicData(type, meta, context, dependencies, block)
|
|
||||||
|
|
||||||
public inline operator fun <reified T : Any> invoke(
|
|
||||||
meta: Meta = Meta.EMPTY,
|
|
||||||
context: CoroutineContext = EmptyCoroutineContext,
|
|
||||||
dependencies: Collection<Data<*>> = emptyList(),
|
|
||||||
noinline block: suspend CoroutineScope.() -> T
|
|
||||||
): Data<T> = invoke(T::class, meta, context, dependencies, block)
|
|
||||||
|
|
||||||
public operator fun <T : Any> invoke(
|
|
||||||
name: String,
|
|
||||||
type: KClass<out T>,
|
|
||||||
meta: Meta = Meta.EMPTY,
|
|
||||||
context: CoroutineContext = EmptyCoroutineContext,
|
|
||||||
dependencies: Collection<Data<*>> = emptyList(),
|
|
||||||
block: suspend CoroutineScope.() -> T
|
|
||||||
): Data<T> = NamedData(name, invoke(type, meta, context, dependencies, block))
|
|
||||||
|
|
||||||
public inline operator fun <reified T : Any> invoke(
|
|
||||||
name: String,
|
|
||||||
meta: Meta = Meta.EMPTY,
|
|
||||||
context: CoroutineContext = EmptyCoroutineContext,
|
|
||||||
dependencies: Collection<Data<*>> = emptyList(),
|
|
||||||
noinline block: suspend CoroutineScope.() -> T
|
|
||||||
): Data<T> =
|
|
||||||
invoke(name, T::class, meta, context, dependencies, block)
|
|
||||||
|
|
||||||
public fun <T : Any> static(value: T, meta: Meta = Meta.EMPTY): Data<T> =
|
|
||||||
StaticData(value, meta)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
public class DynamicData<T : Any>(
|
|
||||||
override val type: KClass<out T>,
|
|
||||||
override val meta: Meta = Meta.EMPTY,
|
|
||||||
context: CoroutineContext = EmptyCoroutineContext,
|
|
||||||
dependencies: Collection<Data<*>> = emptyList(),
|
|
||||||
block: suspend CoroutineScope.() -> T
|
|
||||||
) : Data<T>, DynamicGoal<T>(context, dependencies, block)
|
|
||||||
|
|
||||||
public class StaticData<T : Any>(
|
|
||||||
value: T,
|
|
||||||
override val meta: Meta = Meta.EMPTY
|
|
||||||
) : Data<T>, StaticGoal<T>(value) {
|
|
||||||
override val type: KClass<out T> get() = value::class
|
|
||||||
}
|
|
||||||
|
|
||||||
public class NamedData<out T : Any>(public val name: String, data: Data<T>) : Data<T> by data
|
|
||||||
|
|
||||||
/**
 * Create a new [Data] that awaits this one and transforms its value with [block].
 * The receiver is registered as a dependency, so it is started before [block] runs.
 *
 * @param outputType runtime class of the result
 * @param meta meta for the result; defaults to this data's meta
 */
public fun <T : Any, R : Any> Data<T>.map(
    outputType: KClass<out R>,
    coroutineContext: CoroutineContext = EmptyCoroutineContext,
    meta: Meta = this.meta,
    block: suspend CoroutineScope.(T) -> R
): Data<R> = DynamicData(outputType, meta, coroutineContext, listOf(this)) {
    block(await())
}


/**
 * Create a data pipe
 */
public inline fun <T : Any, reified R : Any> Data<T>.map(
    coroutineContext: CoroutineContext = EmptyCoroutineContext,
    meta: Meta = this.meta,
    noinline block: suspend CoroutineScope.(T) -> R
): Data<R> = DynamicData(R::class, meta, coroutineContext, listOf(this)) {
    block(await())
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create a joined data.
|
|
||||||
*/
|
|
||||||
/**
 * Create a joined data: a single [Data] that awaits every element of this collection
 * and reduces their values with [block]. All elements become dependencies of the result.
 *
 * @param meta meta for the resulting data (no default — the caller must decide how to merge)
 */
public inline fun <T : Any, reified R : Any> Collection<Data<T>>.reduce(
    coroutineContext: CoroutineContext = EmptyCoroutineContext,
    meta: Meta,
    noinline block: suspend CoroutineScope.(Collection<T>) -> R
): Data<R> = DynamicData(
    R::class,
    meta,
    coroutineContext,
    this
) {
    // `map { run { it.await() } }` had a redundant `run` wrapper
    block(map { it.await() })
}
|
|
||||||
|
|
||||||
/**
 * Join a map of data into a single [Data], preserving keys: [block] receives the map
 * with each value awaited. All values become dependencies of the result.
 *
 * @param outputType runtime class of the result
 */
public fun <K, T : Any, R : Any> Map<K, Data<T>>.reduce(
    outputType: KClass<out R>,
    coroutineContext: CoroutineContext = EmptyCoroutineContext,
    meta: Meta,
    block: suspend CoroutineScope.(Map<K, T>) -> R
): DynamicData<R> = DynamicData(
    outputType,
    meta,
    coroutineContext,
    this.values
) {
    block(mapValues { it.value.await() })
}


/**
 * A joining of multiple data into a single one
 * @param K type of the map key
 * @param T type of the input goal
 * @param R type of the result goal
 */
public inline fun <K, T : Any, reified R : Any> Map<K, Data<T>>.reduce(
    coroutineContext: CoroutineContext = EmptyCoroutineContext,
    meta: Meta,
    noinline block: suspend CoroutineScope.(Map<K, T>) -> R
): DynamicData<R> = DynamicData(
    R::class,
    meta,
    coroutineContext,
    this.values
) {
    block(mapValues { it.value.await() })
}
|
|
||||||
|
|
||||||
|
|
@ -1,53 +0,0 @@
|
|||||||
package hep.dataforge.data
|
|
||||||
|
|
||||||
import hep.dataforge.meta.*
|
|
||||||
import hep.dataforge.names.toName
|
|
||||||
|
|
||||||
|
|
||||||
/**
 * A meta-backed specification for filtering a [DataNode] (see `DataNode.filter`).
 * All properties are delegated to the underlying [Scheme] meta.
 */
public class DataFilter : Scheme() {
    /**
     * A source node for the filter
     */
    public var from: String? by string()
    /**
     * A target placement for the filtered node
     */
    public var to: String? by string()
    /**
     * A regular expression pattern for the filter
     */
    public var pattern: String by string(".*")
//    val prefix by string()
//    val suffix by string()

    public companion object : SchemeSpec<DataFilter>(::DataFilter)
}
|
|
||||||
|
|
||||||
/**
 * Apply meta-based filter to given data node.
 *
 * Resolution order: if [DataFilter.from] is set, filtering starts at that child node
 * (falling back to this node when it is absent); entries whose full name matches
 * [DataFilter.pattern] are kept; if [DataFilter.to] is set, the result is re-rooted
 * under that name.
 */
public fun <T : Any> DataNode<T>.filter(filter: DataFilter): DataNode<T> {
    val sourceNode = filter.from?.let { get(it.toName()).node } ?: this@filter
    val regex = filter.pattern.toRegex()
    val targetNode = DataTreeBuilder(type).apply {
        sourceNode.dataSequence().forEach { (name, data) ->
            // match is against the full (dot-separated) name string
            if (name.toString().matches(regex)) {
                this[name] = data
            }
        }
    }
    return filter.to?.let {
        DataTreeBuilder(type).apply { this[it.toName()] = targetNode }.build()
    } ?: targetNode.build()
}

/**
 * Filter data using [DataFilter] specification
 */
public fun <T : Any> DataNode<T>.filter(filter: Meta): DataNode<T> = filter(DataFilter.read(filter))

/**
 * Filter data using [DataFilter] builder
 */
public fun <T : Any> DataNode<T>.filter(filterBuilder: DataFilter.() -> Unit): DataNode<T> =
    filter(DataFilter(filterBuilder))
|
|
@ -1,296 +0,0 @@
|
|||||||
package hep.dataforge.data
|
|
||||||
|
|
||||||
import hep.dataforge.meta.*
|
|
||||||
import hep.dataforge.names.*
|
|
||||||
import kotlinx.coroutines.CoroutineScope
|
|
||||||
import kotlinx.coroutines.Job
|
|
||||||
import kotlinx.coroutines.coroutineScope
|
|
||||||
import kotlinx.coroutines.launch
|
|
||||||
import kotlin.collections.component1
|
|
||||||
import kotlin.collections.component2
|
|
||||||
import kotlin.collections.set
|
|
||||||
import kotlin.reflect.KClass
|
|
||||||
|
|
||||||
/**
 * A single entry of a [DataNode]: either a nested node or a data leaf.
 */
public sealed class DataItem<out T : Any> : MetaRepr {
    // common ancestor class of the contained data
    public abstract val type: KClass<out T>

    public abstract val meta: Meta

    /** A branch: wraps a nested [DataNode]; type and meta are delegated to it. */
    public class Node<out T : Any>(public val node: DataNode<T>) : DataItem<T>() {
        override val type: KClass<out T> get() = node.type

        override fun toMeta(): Meta = node.toMeta()

        override val meta: Meta get() = node.meta
    }

    /** A leaf: wraps a single [Data]; type and meta are delegated to it. */
    public class Leaf<out T : Any>(public val data: Data<T>) : DataItem<T>() {
        override val type: KClass<out T> get() = data.type

        override fun toMeta(): Meta = data.toMeta()

        override val meta: Meta get() = data.meta
    }
}
|
|
||||||
|
|
||||||
/**
 * A tree-like data structure grouped into the node. All data inside the node must inherit its type
 */
public interface DataNode<out T : Any> : MetaRepr {

    /**
     * The minimal common ancestor to all data in the node
     */
    public val type: KClass<out T>

    // direct children only; deep traversal is provided by asSequence()/dataSequence()
    public val items: Map<NameToken, DataItem<T>>

    public val meta: Meta

    // serializes the node type name and each direct item recursively
    override fun toMeta(): Meta = Meta {
        "type" put (type.simpleName ?: "undefined")
        "items" put {
            this@DataNode.items.forEach {
                it.key.toString() put it.value.toMeta()
            }
        }
    }

    /**
     * Start computation for all goals in data node and return a job for the whole node
     */
    @Suppress("DeferredResultUnused")
    public fun CoroutineScope.startAll(): Job = launch {
        items.values.forEach {
            when (it) {
                is DataItem.Node<*> -> it.node.run { startAll() }
                is DataItem.Leaf<*> -> it.data.run { startAsync() }
            }
        }
    }

    public companion object {
        public const val TYPE: String = "dataNode"

        /** Build a [DataTree] with an explicitly supplied runtime type. */
        public operator fun <T : Any> invoke(type: KClass<out T>, block: DataTreeBuilder<T>.() -> Unit): DataTree<T> =
            DataTreeBuilder(type).apply(block).build()

        /** Build a [DataTree] with the type inferred from the reified parameter. */
        public inline operator fun <reified T : Any> invoke(noinline block: DataTreeBuilder<T>.() -> Unit): DataTree<T> =
            DataTreeBuilder(T::class).apply(block).build()

        public fun <T : Any> builder(type: KClass<out T>): DataTreeBuilder<T> = DataTreeBuilder(type)
    }
}
|
|
||||||
|
|
||||||
/** Start all goals in the node and suspend until every one of them completes. */
public suspend fun <T: Any> DataNode<T>.join(): Unit = coroutineScope { startAll().join() }

/** The nested node if this item is a [DataItem.Node], otherwise null. */
public val <T : Any> DataItem<T>?.node: DataNode<T>? get() = (this as? DataItem.Node<T>)?.node
/** The data if this item is a [DataItem.Leaf], otherwise null. */
public val <T : Any> DataItem<T>?.data: Data<T>? get() = (this as? DataItem.Leaf<T>)?.data

/**
 * Resolve an item by (possibly multi-token) name; descends through nested nodes.
 * Throws on an empty name; returns null when any segment is missing.
 */
public operator fun <T : Any> DataNode<T>.get(name: Name): DataItem<T>? = when (name.length) {
    0 -> error("Empty name")
    1 -> items[name.firstOrNull()]
    else -> get(name.firstOrNull()!!.asName()).node?.get(name.cutFirst())
}

public operator fun <T : Any> DataNode<T>.get(name: String): DataItem<T>? = get(name.toName())
|
|
||||||
|
|
||||||
/**
 * Sequence of all children including nodes
 */
public fun <T : Any> DataNode<T>.asSequence(): Sequence<Pair<Name, DataItem<T>>> = sequence {
    items.forEach { (head, item) ->
        yield(head.asName() to item)
        if (item is DataItem.Node) {
            // recurse, prefixing child names with this branch's token
            val subSequence = item.node.asSequence()
                .map { (name, data) -> (head.asName() + name) to data }
            yieldAll(subSequence)
        }
    }
}

/**
 * Sequence of data entries
 */
public fun <T : Any> DataNode<T>.dataSequence(): Sequence<Pair<Name, Data<T>>> = sequence {
    items.forEach { (head, item) ->
        when (item) {
            is DataItem.Leaf -> yield(head.asName() to item.data)
            is DataItem.Node -> {
                // only leaves are emitted; node names appear as prefixes of leaf names
                val subSequence = item.node.dataSequence()
                    .map { (name, data) -> (head.asName() + name) to data }
                yieldAll(subSequence)
            }
        }
    }
}

/** Iterate over all items (nodes and leaves) with their full names. */
public operator fun <T : Any> DataNode<T>.iterator(): Iterator<Pair<Name, DataItem<T>>> = asSequence().iterator()
|
|
||||||
|
|
||||||
/**
 * Immutable [DataNode] implementation produced by [DataTreeBuilder.build].
 */
public class DataTree<out T : Any> internal constructor(
    override val type: KClass<out T>,
    override val items: Map<NameToken, DataItem<T>>,
    override val meta: Meta
) : DataNode<T>

/** Mutable counterpart of [DataItem] used while a tree is being built. */
private sealed class DataTreeBuilderItem<out T : Any> {
    class Node<T : Any>(val tree: DataTreeBuilder<T>) : DataTreeBuilderItem<T>()
    class Leaf<T : Any>(val value: Data<T>) : DataTreeBuilderItem<T>()
}
|
|
||||||
|
|
||||||
/**
 * A builder for a DataTree.
 *
 * Entries are addressed by [Name]; intermediate nodes are created on demand.
 * Inserting into an already occupied slot is an error.
 */
@DFBuilder
public class DataTreeBuilder<T : Any>(public val type: KClass<out T>) {
    private val map = HashMap<NameToken, DataTreeBuilderItem<T>>()

    private var meta = MetaBuilder()

    public operator fun set(token: NameToken, node: DataTreeBuilder<out T>) {
        if (map.containsKey(token)) error("Tree entry with name $token is not empty")
        map[token] = DataTreeBuilderItem.Node(node)
    }

    public operator fun set(token: NameToken, data: Data<T>) {
        if (map.containsKey(token)) error("Tree entry with name $token is not empty")
        map[token] = DataTreeBuilderItem.Leaf(data)
    }

    /** Get or create the direct child node for [token]; fails if a leaf occupies it. */
    private fun buildNode(token: NameToken): DataTreeBuilder<T> {
        return if (!map.containsKey(token)) {
            DataTreeBuilder(type).also { map[token] = DataTreeBuilderItem.Node(it) }
        } else {
            (map[token] as? DataTreeBuilderItem.Node<T> ?: error("The node with name $token is occupied by leaf")).tree
        }
    }

    /** Get or create the (possibly deep) node at [name], creating intermediates. */
    private fun buildNode(name: Name): DataTreeBuilder<T> {
        return when (name.length) {
            0 -> this
            1 -> buildNode(name.firstOrNull()!!)
            else -> buildNode(name.firstOrNull()!!).buildNode(name.cutFirst())
        }
    }

    public operator fun set(name: Name, data: Data<T>) {
        when (name.length) {
            0 -> error("Can't add data with empty name")
            1 -> set(name.firstOrNull()!!, data)
            // BUG FIX: this branch was `2 ->`, so names with three or more tokens
            // were silently dropped; buildNode handles arbitrary depth.
            else -> buildNode(name.cutLast())[name.lastOrNull()!!] = data
        }
    }

    public operator fun set(name: Name, node: DataTreeBuilder<out T>) {
        when (name.length) {
            0 -> error("Can't add data with empty name")
            1 -> set(name.firstOrNull()!!, node)
            // BUG FIX: was `2 ->`; deep names were silently ignored.
            else -> buildNode(name.cutLast())[name.lastOrNull()!!] = node
        }
    }

    public operator fun set(name: Name, node: DataNode<T>): Unit = set(name, node.builder())

    public operator fun set(name: Name, item: DataItem<T>): Unit = when (item) {
        is DataItem.Node<T> -> set(name, item.node.builder())
        is DataItem.Leaf<T> -> set(name, item.data)
    }

    /**
     * Append data to node
     */
    public infix fun String.put(data: Data<T>): Unit = set(toName(), data)

    /**
     * Append node
     */
    public infix fun String.put(node: DataNode<T>): Unit = set(toName(), node)

    public infix fun String.put(item: DataItem<T>): Unit = set(toName(), item)

    /**
     * Build and append node
     */
    public infix fun String.put(block: DataTreeBuilder<T>.() -> Unit): Unit = set(toName(), DataTreeBuilder(type).apply(block))


    /**
     * Update data with given node data and meta with node meta.
     */
    public fun update(node: DataNode<T>) {
        node.dataSequence().forEach {
            //TODO check if the place is occupied
            this[it.first] = it.second
        }
        meta.update(node.meta)
    }

    public fun meta(block: MetaBuilder.() -> Unit): MetaBuilder = meta.apply(block)

    public fun meta(meta: Meta) {
        this.meta = meta.builder()
    }

    /** Recursively seal the builder into an immutable [DataTree]. */
    public fun build(): DataTree<T> {
        val resMap = map.mapValues { (_, value) ->
            when (value) {
                is DataTreeBuilderItem.Leaf -> DataItem.Leaf(value.value)
                is DataTreeBuilderItem.Node -> DataItem.Node(value.tree.build())
            }
        }
        return DataTree(type, resMap, meta.seal())
    }
}
|
|
||||||
|
|
||||||
/** Add a data leaf at [name]. */
public fun <T : Any> DataTreeBuilder<T>.datum(name: Name, data: Data<T>) {
    this[name] = data
}

/** Add a data leaf at the name parsed from [name]. */
public fun <T : Any> DataTreeBuilder<T>.datum(name: String, data: Data<T>) {
    this[name.toName()] = data
}

/** Wrap [data] into a static [Data] with the given [meta] and add it at [name]. */
public fun <T : Any> DataTreeBuilder<T>.static(name: Name, data: T, meta: Meta = Meta.EMPTY) {
    this[name] = Data.static(data, meta)
}

// NOTE(review): this overload and the one above both have defaulted trailing
// parameters, so a two-argument call `static(name, data)` with a Name is
// ambiguous — verify intended call sites always pass the third argument.
public fun <T : Any> DataTreeBuilder<T>.static(name: Name, data: T, block: MetaBuilder.() -> Unit = {}) {
    this[name] = Data.static(data, Meta(block))
}

public fun <T : Any> DataTreeBuilder<T>.static(name: String, data: T, block: MetaBuilder.() -> Unit = {}) {
    this[name.toName()] = Data.static(data, Meta(block))
}

/** Attach an existing node at [name]. */
public fun <T : Any> DataTreeBuilder<T>.node(name: Name, node: DataNode<T>) {
    this[name] = node
}

public fun <T : Any> DataTreeBuilder<T>.node(name: String, node: DataNode<T>) {
    this[name.toName()] = node
}

/** Build a child node in place with [block] and attach it at [name]. */
public inline fun <reified T : Any> DataTreeBuilder<T>.node(name: Name, noinline block: DataTreeBuilder<T>.() -> Unit) {
    this[name] = DataNode(T::class, block)
}

public inline fun <reified T : Any> DataTreeBuilder<T>.node(name: String, noinline block: DataTreeBuilder<T>.() -> Unit) {
    this[name.toName()] = DataNode(T::class, block)
}
|
|
||||||
|
|
||||||
/**
 * Generate a mutable builder from this node. Node content is not changed
 * (only data leaves are copied over; node meta is not transferred).
 */
public fun <T : Any> DataNode<T>.builder(): DataTreeBuilder<T> = DataTreeBuilder(type).apply {
    dataSequence().forEach { (name, data) -> this[name] = data }
}

/** Keep only data entries for which [predicate] holds; structure is rebuilt from the kept leaves. */
public fun <T : Any> DataNode<T>.filter(predicate: (Name, Data<T>) -> Boolean): DataNode<T> = DataNode.invoke(type) {
    dataSequence().forEach { (name, data) ->
        if (predicate(name, data)) {
            this[name] = data
        }
    }
}
|
|
||||||
|
|
||||||
/**
 * The first data entry in traversal order, or null when the node contains no data.
 * BUG FIX: previously used `first()`, which throws [NoSuchElementException] on an
 * empty node despite the declared nullable return type.
 */
public fun <T : Any> DataNode<T>.first(): Data<T>? = dataSequence().firstOrNull()?.second
|
|
@ -1,114 +0,0 @@
|
|||||||
package hep.dataforge.data
|
|
||||||
|
|
||||||
import hep.dataforge.meta.DFExperimental
|
|
||||||
import kotlinx.coroutines.*
|
|
||||||
import kotlin.coroutines.CoroutineContext
|
|
||||||
import kotlin.coroutines.EmptyCoroutineContext
|
|
||||||
|
|
||||||
/**
 * A lazy, restartable asynchronous computation with explicit dependencies.
 */
public interface Goal<out T> {
    // goals that must be started before this one computes
    public val dependencies: Collection<Goal<*>>
    /**
     * Returns current running coroutine if the goal is started
     */
    public val result: Deferred<T>?

    /**
     * Get ongoing computation or start a new one.
     * Does not guarantee thread safety. In case of multi-thread access, could create orphan computations.
     */
    public fun CoroutineScope.startAsync(): Deferred<T>

    /**
     * Reset the computation
     */
    public fun reset()

    public companion object
}
|
|
||||||
|
|
||||||
/** Start the goal (in a child scope of the caller) if needed and suspend for its result. */
public suspend fun <T> Goal<T>.await(): T = coroutineScope { startAsync().await() }

/** True when the goal has been started and its deferred has completed. */
public val Goal<*>.isComplete: Boolean get() = result?.isCompleted ?: false
|
|
||||||
|
|
||||||
/**
 * A [Goal] that is complete from construction: its [result] is a pre-completed
 * deferred holding [value]. [reset] is a no-op.
 */
public open class StaticGoal<T>(public val value: T) : Goal<T> {
    override val dependencies: Collection<Goal<*>> get() = emptyList()
    override val result: Deferred<T> = CompletableDeferred(value)

    override fun CoroutineScope.startAsync(): Deferred<T> = result

    override fun reset() {
        //doNothing
    }
}
|
|
||||||
|
|
||||||
/**
 * A [Goal] computed on demand by [block] after all [dependencies] are started.
 * The computation is cached in [result] until [reset] is called.
 */
public open class DynamicGoal<T>(
    private val coroutineContext: CoroutineContext = EmptyCoroutineContext,
    override val dependencies: Collection<Goal<*>> = emptyList(),
    public val block: suspend CoroutineScope.() -> T
) : Goal<T> {

    final override var result: Deferred<T>? = null
        private set

    /**
     * Get ongoing computation or start a new one.
     * Does not guarantee thread safety. In case of multi-thread access, could create orphan computations.
     */
    @DFExperimental
    override fun CoroutineScope.startAsync(): Deferred<T> {
        // note: dependencies are (re)started even when a cached result exists
        val startedDependencies = this@DynamicGoal.dependencies.map { goal ->
            goal.run { startAsync() }
        }
        return result
            ?: async(this@DynamicGoal.coroutineContext + CoroutineMonitor() + Dependencies(startedDependencies)) {
                // a failed dependency cancels this computation
                startedDependencies.forEach { deferred ->
                    deferred.invokeOnCompletion { error ->
                        if (error != null) cancel(CancellationException("Dependency $deferred failed with error: ${error.message}"))
                    }
                }
                block()
            }.also { result = it }
    }

    /**
     * Reset the computation
     */
    override fun reset() {
        result?.cancel()
        result = null
    }
}
|
|
||||||
|
|
||||||
/**
 * Create a one-to-one goal based on existing goal
 * (the receiver becomes the single dependency of the new goal).
 */
public fun <T, R> Goal<T>.map(
    coroutineContext: CoroutineContext = EmptyCoroutineContext,
    block: suspend CoroutineScope.(T) -> R
): Goal<R> = DynamicGoal(coroutineContext, listOf(this)) {
    block(await())
}
|
|
||||||
|
|
||||||
/**
 * Create a joining goal: awaits every goal in this collection and reduces
 * their values with [block]. All elements become dependencies of the result.
 */
public fun <T, R> Collection<Goal<T>>.reduce(
    coroutineContext: CoroutineContext = EmptyCoroutineContext,
    block: suspend CoroutineScope.(Collection<T>) -> R
): Goal<R> = DynamicGoal(coroutineContext, this) {
    // `map { run { it.await() } }` had a redundant `run` wrapper
    block(map { it.await() })
}
|
|
||||||
|
|
||||||
/**
 * A joining goal for a map
 * @param K type of the map key
 * @param T type of the input goal
 * @param R type of the result goal
 */
public fun <K, T, R> Map<K, Goal<T>>.reduce(
    coroutineContext: CoroutineContext = EmptyCoroutineContext,
    block: suspend CoroutineScope.(Map<K, T>) -> R
): Goal<R> = DynamicGoal(coroutineContext, this.values) {
    block(mapValues { it.value.await() })
}
|
|
||||||
|
|
@ -1,68 +0,0 @@
|
|||||||
/*
|
|
||||||
* Copyright 2015 Alexander Nozik.
|
|
||||||
*
|
|
||||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
* you may not use this file except in compliance with the License.
|
|
||||||
* You may obtain a copy of the License at
|
|
||||||
*
|
|
||||||
* http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
*
|
|
||||||
* Unless required by applicable law or agreed to in writing, software
|
|
||||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
* See the License for the specific language governing permissions and
|
|
||||||
* limitations under the License.
|
|
||||||
*/
|
|
||||||
package hep.dataforge.data
|
|
||||||
|
|
||||||
import hep.dataforge.meta.Meta
|
|
||||||
import hep.dataforge.meta.get
|
|
||||||
import hep.dataforge.meta.string
|
|
||||||
|
|
||||||
/**
 * A rule that splits a [DataNode] into named groups.
 */
public interface GroupRule {
    public operator fun <T : Any> invoke(node: DataNode<T>): Map<String, DataNode<T>>

    public companion object{
        /**
         * Create grouping rule that creates groups for different values of value
         * field with name [key]
         *
         * @param key
         * @param defaultTagValue used when a data item has no value for [key]
         * @return
         */
        public fun byValue(key: String, defaultTagValue: String): GroupRule = object :
            GroupRule {
            override fun <T : Any> invoke(node: DataNode<T>): Map<String, DataNode<T>> {
                val map = HashMap<String, DataTreeBuilder<T>>()

                node.dataSequence().forEach { (name, data) ->
                    val tagValue = data.meta[key]?.string ?: defaultTagValue
                    map.getOrPut(tagValue) { DataNode.builder(node.type) }[name] = data
                }

                return map.mapValues { it.value.build() }
            }
        }


        // @ValueDef(key = "byValue", required = true, info = "The name of annotation value by which grouping should be made")
        // @ValueDef(
        //     key = "defaultValue",
        //     def = "default",
        //     info = "Default value which should be used for content in which the grouping value is not presented"
        // )
        /** Read a grouping rule from [config]; falls back to a rule that puts everything in one unnamed group. */
        public fun byMeta(config: Meta): GroupRule {
            //TODO expand grouping options
            return config["byValue"]?.string?.let {
                byValue(
                    it,
                    config["defaultValue"]?.string ?: "default"
                )
            }
                ?: object : GroupRule {
                    override fun <T : Any> invoke(node: DataNode<T>): Map<String, DataNode<T>> = mapOf("" to node)
                }
        }
    }
}
|
|
@ -1,75 +0,0 @@
|
|||||||
package hep.dataforge.data
|
|
||||||
|
|
||||||
import hep.dataforge.meta.*
|
|
||||||
import hep.dataforge.names.Name
|
|
||||||
import kotlin.reflect.KClass
|
|
||||||
|
|
||||||
/**
 * Action environment includes data name, data meta and action configuration meta
 */
public data class ActionEnv(
    val name: Name,
    val meta: Meta,
    val actionMeta: Meta
)
|
|
||||||
|
|
||||||
/**
 * Action environment: mutable configuration for a single [MapAction] entry.
 * [name] and [meta] may be modified by the action block to rename/re-annotate the result.
 */
@DFBuilder
public class MapActionBuilder<T, R>(public var name: Name, public var meta: MetaBuilder, public val actionMeta: Meta) {
    public lateinit var result: suspend ActionEnv.(T) -> R

    /**
     * Calculate the result of goal
     */
    public fun result(f: suspend ActionEnv.(T) -> R) {
        result = f // removed stray trailing semicolon
    }
}
|
|
||||||
|
|
||||||
|
|
||||||
/**
 * An [Action] that transforms each data entry of a node one-to-one using the
 * [MapActionBuilder] configuration supplied in [block].
 */
public class MapAction<T : Any, out R : Any>(
    public val inputType: KClass<T>,
    public val outputType: KClass<out R>,
    private val block: MapActionBuilder<T, R>.() -> Unit
) : Action<T, R> {

    override fun invoke(node: DataNode<T>, meta: Meta): DataNode<R> {
        node.ensureType(inputType)

        return DataNode.invoke(outputType) {
            node.dataSequence().forEach { (name, data) ->
                /*
                 * Creating a new environment for action using **old** name, old meta and task meta
                 */
                val env = ActionEnv(name, data.meta, meta)

                //applying transformation from builder
                val builder = MapActionBuilder<T, R>(
                    name,
                    data.meta.builder(), // using data meta
                    meta
                ).apply(block)

                //getting new name
                val newName = builder.name

                //getting new meta
                val newMeta = builder.meta.seal()

                // lazy mapping: the result data awaits the source and applies builder.result
                val newData = data.map(outputType, meta = newMeta) { builder.result(env, it) }
                //setting the data node
                this[newName] = newData
            }
        }
    }
}
|
|
||||||
|
|
||||||
/** Apply a [MapAction] to this node with reified input/output types. */
public inline fun <reified T : Any, reified R : Any> DataNode<T>.map(
    meta: Meta,
    noinline action: MapActionBuilder<in T, out R>.() -> Unit
): DataNode<R> = MapAction(T::class, R::class, action).invoke(this, meta)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1,107 +0,0 @@
|
|||||||
package hep.dataforge.data
|
|
||||||
|
|
||||||
import hep.dataforge.meta.Meta
|
|
||||||
import hep.dataforge.meta.MetaBuilder
|
|
||||||
import hep.dataforge.names.Name
|
|
||||||
import hep.dataforge.names.toName
|
|
||||||
import kotlin.reflect.KClass
|
|
||||||
|
|
||||||
|
|
||||||
/**
 * One group of a [ReduceAction]: a named sub-node plus the rule that reduces
 * its data map to a single result.
 */
public class JoinGroup<T : Any, R : Any>(public var name: String, internal val node: DataNode<T>) {

    public var meta: MetaBuilder = MetaBuilder()

    public lateinit var result: suspend ActionEnv.(Map<Name, T>) -> R

    public fun result(f: suspend ActionEnv.(Map<Name, T>) -> R) {
        this.result = f // removed stray trailing semicolon
    }

}
|
|
||||||
|
|
||||||
/**
 * Collects grouping rules for a [ReduceAction]; each rule maps an input node to
 * a list of [JoinGroup]s to be reduced independently.
 */
public class ReduceGroupBuilder<T : Any, R : Any>(public val actionMeta: Meta) {
    private val groupRules: MutableList<(DataNode<T>) -> List<JoinGroup<T, R>>> = ArrayList();

    /**
     * introduce grouping by value name
     */
    public fun byValue(tag: String, defaultTag: String = "@default", action: JoinGroup<T, R>.() -> Unit) {
        groupRules += { node ->
            GroupRule.byValue(tag, defaultTag).invoke(node).map {
                JoinGroup<T, R>(it.key, it.value).apply(action)
            }
        }
    }

    /**
     * Add a single fixed group to grouping rules
     */
    public fun group(groupName: String, filter: DataFilter, action: JoinGroup<T, R>.() -> Unit) {
        groupRules += { node ->
            listOf(
                JoinGroup<T, R>(groupName, node.filter(filter)).apply(action)
            )
        }
    }

    /** Same as above, but with a predicate-based filter. */
    public fun group(groupName: String, filter: (Name, Data<T>) -> Boolean, action: JoinGroup<T, R>.() -> Unit) {
        groupRules += { node ->
            listOf(
                JoinGroup<T, R>(groupName, node.filter(filter)).apply(action)
            )
        }
    }

    /**
     * Apply transformation to the whole node
     */
    public fun result(resultName: String, f: suspend ActionEnv.(Map<Name, T>) -> R) {
        groupRules += { node ->
            listOf(JoinGroup<T, R>(resultName, node).apply { result(f) })
        }
    }

    // evaluate every registered rule against the input and concatenate the groups
    internal fun buildGroups(input: DataNode<T>): List<JoinGroup<T, R>> {
        return groupRules.flatMap { it.invoke(input) }
    }

}
|
|
||||||
|
|
||||||
|
|
||||||
/**
 * The same rules as for KPipe
 * (an [Action] that reduces groups of data entries into single results).
 */
public class ReduceAction<T : Any, R : Any>(
    public val inputType: KClass<T>,
    public val outputType: KClass<out R>,
    private val action: ReduceGroupBuilder<T, R>.() -> Unit
) : Action<T, R> {

    override fun invoke(node: DataNode<T>, meta: Meta): DataNode<R> {
        node.ensureType(inputType)
        return DataNode.invoke(outputType) {
            ReduceGroupBuilder<T, R>(meta).apply(action).buildGroups(node).forEach { group ->

                //val laminate = Laminate(group.meta, meta)

                // all data of the group, keyed by full name
                val dataMap = group.node.dataSequence().associate { it }

                val groupName: String = group.name

                val groupMeta = group.meta

                val env = ActionEnv(groupName.toName(), groupMeta, meta)

                // lazily reduce the whole group into one data entry
                val res: DynamicData<R> = dataMap.reduce(
                    outputType,
                    meta = groupMeta
                ) { group.result.invoke(env, it) }

                set(env.name, res)
            }

        }
    }
}
|
|
||||||
|
|
||||||
/** Convenience lookup of a [Name]-keyed map by a raw string (parsed via [toName]). */
public operator fun <T> Map<Name, T>.get(name: String): T? = get(name.toName())
|
|
@ -1,64 +0,0 @@
|
|||||||
package hep.dataforge.data
|
|
||||||
|
|
||||||
import hep.dataforge.meta.Laminate
|
|
||||||
import hep.dataforge.meta.Meta
|
|
||||||
import hep.dataforge.meta.MetaBuilder
|
|
||||||
import hep.dataforge.meta.builder
|
|
||||||
import hep.dataforge.names.Name
|
|
||||||
import hep.dataforge.names.toName
|
|
||||||
import kotlin.collections.set
|
|
||||||
import kotlin.reflect.KClass
|
|
||||||
|
|
||||||
|
|
||||||
/**
 * Mutable configuration of a single output fragment of a [SplitAction]:
 * the fragment's name, its meta, and the transformation producing its value.
 */
public class FragmentRule<T : Any, R : Any>(public val name: Name, public var meta: MetaBuilder) {
    public lateinit var result: suspend (T) -> R

    public fun result(f: suspend (T) -> R) {
        result = f // removed stray trailing semicolon
    }
}
|
|
||||||
|
|
||||||
|
|
||||||
/**
 * Collects fragment rules for a [SplitAction] applied to a single input entry.
 */
public class SplitBuilder<T : Any, R : Any>(public val name: Name, public val meta: Meta) {
    internal val fragments: MutableMap<Name, FragmentRule<T, R>.() -> Unit> = HashMap()

    /**
     * Add new fragment building rule. If the fragment is not defined, result won't be available even if it is present in the map
     * @param name the name of a fragment
     * @param rule the rule to transform fragment name and meta using
     */
    public fun fragment(name: String, rule: FragmentRule<T, R>.() -> Unit) {
        fragments[name.toName()] = rule
    }
}
|
|
||||||
|
|
||||||
/**
 * An [Action] that splits each input data entry into several output fragments,
 * as configured by the [SplitBuilder] produced in [action].
 */
public class SplitAction<T : Any, R : Any>(
    public val inputType: KClass<T>,
    public val outputType: KClass<out R>,
    private val action: SplitBuilder<T, R>.() -> Unit
) : Action<T, R> {

    override fun invoke(node: DataNode<T>, meta: Meta): DataNode<R> {
        node.ensureType(inputType)

        return DataNode.invoke(outputType) {
            node.dataSequence().forEach { (name, data) ->

                // data meta layered over action meta
                val laminate = Laminate(data.meta, meta)

                val split = SplitBuilder<T, R>(name, data.meta).apply(action)


                // apply individual fragment rules to result
                split.fragments.forEach { (fragmentName, rule) ->
                    val env = FragmentRule<T, R>(fragmentName, laminate.builder())

                    rule(env)

                    // each fragment is a lazy map of the same source data
                    val res = data.map(outputType, meta = env.meta) { env.result(it) }
                    set(env.name, res)
                }
            }
        }
    }
}
|
|
@ -1,73 +0,0 @@
|
|||||||
package hep.dataforge.data
|
|
||||||
|
|
||||||
import hep.dataforge.meta.Meta
|
|
||||||
import hep.dataforge.names.NameToken
|
|
||||||
import kotlinx.coroutines.CoroutineScope
|
|
||||||
import kotlinx.coroutines.Deferred
|
|
||||||
import kotlin.reflect.KClass
|
|
||||||
|
|
||||||
public fun <R : Any, T : R> Data<T>.upcast(type: KClass<out R>): Data<R> {
|
|
||||||
return object : Data<R> by this {
|
|
||||||
override val type: KClass<out R> = type
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Safe upcast a [Data] to a supertype
|
|
||||||
*/
|
|
||||||
public inline fun <reified R : Any, T : R> Data<T>.upcast(): Data<R> = upcast(R::class)
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Check if node could be safely cast to given class
|
|
||||||
*/
|
|
||||||
internal expect fun <R : Any> DataNode<*>.canCast(type: KClass<out R>): Boolean
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Check if data could be safely cast to given class
|
|
||||||
*/
|
|
||||||
internal expect fun <R : Any> Data<*>.canCast(type: KClass<out R>): Boolean
|
|
||||||
|
|
||||||
public fun <R : Any> DataItem<*>.canCast(type: KClass<out R>): Boolean = when (this) {
|
|
||||||
is DataItem.Node -> node.canCast(type)
|
|
||||||
is DataItem.Leaf -> data.canCast(type)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Unsafe cast of data node
|
|
||||||
*/
|
|
||||||
@Suppress("UNCHECKED_CAST")
|
|
||||||
public fun <R : Any> Data<*>.cast(type: KClass<out R>): Data<R> {
|
|
||||||
return object : Data<R> {
|
|
||||||
override val meta: Meta get() = this@cast.meta
|
|
||||||
override val dependencies: Collection<Goal<*>> get() = this@cast.dependencies
|
|
||||||
override val result: Deferred<R>? get() = this@cast.result as Deferred<R>
|
|
||||||
override fun CoroutineScope.startAsync(): Deferred<R> = this@cast.run { startAsync() as Deferred<R> }
|
|
||||||
override fun reset() = this@cast.reset()
|
|
||||||
override val type: KClass<out R> = type
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public inline fun <reified R : Any> Data<*>.cast(): Data<R> = cast(R::class)
|
|
||||||
|
|
||||||
@Suppress("UNCHECKED_CAST")
|
|
||||||
public fun <R : Any> DataNode<*>.cast(type: KClass<out R>): DataNode<R> {
|
|
||||||
return object : DataNode<R> {
|
|
||||||
override val meta: Meta get() = this@cast.meta
|
|
||||||
override val type: KClass<out R> = type
|
|
||||||
override val items: Map<NameToken, DataItem<R>> get() = this@cast.items as Map<NameToken, DataItem<R>>
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public inline fun <reified R : Any> DataNode<*>.cast(): DataNode<R> = cast(R::class)
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Check that node is compatible with given type meaning that each element could be cast to the type
|
|
||||||
*/
|
|
||||||
public fun <T : Any> DataNode<*>.ensureType(type: KClass<out T>) {
|
|
||||||
if (!canCast(type)) {
|
|
||||||
error("$type expected, but $type received")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
//expect fun <T : Any, R : Any> DataNode<T>.cast(type: KClass<out R>): DataNode<R>
|
|
@ -0,0 +1,65 @@
|
|||||||
|
package space.kscience.dataforge.actions
|
||||||
|
|
||||||
|
import kotlinx.coroutines.launch
|
||||||
|
import space.kscience.dataforge.data.*
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.misc.DFInternal
|
||||||
|
import space.kscience.dataforge.names.Name
|
||||||
|
import space.kscience.dataforge.names.startsWith
|
||||||
|
import kotlin.reflect.KType
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove all values with keys starting with [name]
|
||||||
|
*/
|
||||||
|
internal fun MutableMap<Name, *>.removeWhatStartsWith(name: Name) {
|
||||||
|
val toRemove = keys.filter { it.startsWith(name) }
|
||||||
|
toRemove.forEach(::remove)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* An action that caches results on-demand and recalculates them on source push
|
||||||
|
*/
|
||||||
|
public abstract class AbstractAction<in T : Any, R : Any>(
|
||||||
|
public val outputType: KType,
|
||||||
|
) : Action<T, R> {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate initial content of the output
|
||||||
|
*/
|
||||||
|
protected abstract fun DataSetBuilder<R>.generate(
|
||||||
|
data: DataSet<T>,
|
||||||
|
meta: Meta,
|
||||||
|
)
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update part of the data set when given [updateKey] is triggered by the source
|
||||||
|
*/
|
||||||
|
protected open fun DataSourceBuilder<R>.update(
|
||||||
|
dataSet: DataSet<T>,
|
||||||
|
meta: Meta,
|
||||||
|
updateKey: Name,
|
||||||
|
) {
|
||||||
|
// By default, recalculate the whole dataset
|
||||||
|
generate(dataSet, meta)
|
||||||
|
}
|
||||||
|
|
||||||
|
@OptIn(DFInternal::class)
|
||||||
|
override fun execute(
|
||||||
|
dataSet: DataSet<T>,
|
||||||
|
meta: Meta,
|
||||||
|
): DataSet<R> = if (dataSet is DataSource) {
|
||||||
|
DataSource(outputType, dataSet){
|
||||||
|
generate(dataSet, meta)
|
||||||
|
|
||||||
|
launch {
|
||||||
|
dataSet.updates.collect { name ->
|
||||||
|
update(dataSet, meta, name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
DataTree<R>(outputType) {
|
||||||
|
generate(dataSet, meta)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,40 @@
|
|||||||
|
package space.kscience.dataforge.actions
|
||||||
|
|
||||||
|
import space.kscience.dataforge.data.DataSet
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.misc.DFExperimental
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A simple data transformation on a data node. Actions should avoid doing actual dependency evaluation in [execute].
|
||||||
|
*/
|
||||||
|
public interface Action<in T : Any, out R : Any> {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Transform the data in the node, producing a new node. By default, it is assumed that all calculations are lazy
|
||||||
|
* so not actual computation is started at this moment.
|
||||||
|
*/
|
||||||
|
public fun execute(dataSet: DataSet<T>, meta: Meta = Meta.EMPTY): DataSet<R>
|
||||||
|
|
||||||
|
public companion object
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Action composition. The result is terminal if one of its parts is terminal
|
||||||
|
*/
|
||||||
|
public infix fun <T : Any, I : Any, R : Any> Action<T, I>.then(action: Action<I, R>): Action<T, R> {
|
||||||
|
// TODO introduce composite action and add optimize by adding action to the list
|
||||||
|
return object : Action<T, R> {
|
||||||
|
|
||||||
|
override fun execute(
|
||||||
|
dataSet: DataSet<T>,
|
||||||
|
meta: Meta,
|
||||||
|
): DataSet<R> = action.execute(this@then.execute(dataSet, meta), meta)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@DFExperimental
|
||||||
|
public operator fun <T : Any, R : Any> Action<T, R>.invoke(
|
||||||
|
dataSet: DataSet<T>,
|
||||||
|
meta: Meta = Meta.EMPTY,
|
||||||
|
): DataSet<R> = execute(dataSet, meta)
|
||||||
|
|
@ -0,0 +1,106 @@
|
|||||||
|
package space.kscience.dataforge.actions
|
||||||
|
|
||||||
|
import space.kscience.dataforge.data.*
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.meta.MutableMeta
|
||||||
|
import space.kscience.dataforge.meta.seal
|
||||||
|
import space.kscience.dataforge.meta.toMutableMeta
|
||||||
|
import space.kscience.dataforge.misc.DFBuilder
|
||||||
|
import space.kscience.dataforge.misc.DFExperimental
|
||||||
|
import space.kscience.dataforge.misc.DFInternal
|
||||||
|
import space.kscience.dataforge.names.Name
|
||||||
|
import kotlin.reflect.KType
|
||||||
|
import kotlin.reflect.typeOf
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Action environment includes data name, data meta and action configuration meta
|
||||||
|
*/
|
||||||
|
public data class ActionEnv(
|
||||||
|
val name: Name,
|
||||||
|
val meta: Meta,
|
||||||
|
val actionMeta: Meta,
|
||||||
|
)
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Action environment
|
||||||
|
*/
|
||||||
|
@DFBuilder
|
||||||
|
public class MapActionBuilder<T, R>(
|
||||||
|
public var name: Name,
|
||||||
|
public var meta: MutableMeta,
|
||||||
|
public val actionMeta: Meta,
|
||||||
|
@PublishedApi internal var outputType: KType,
|
||||||
|
) {
|
||||||
|
|
||||||
|
public lateinit var result: suspend ActionEnv.(T) -> R
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set unsafe [outputType] for the resulting data. Be sure that it is correct.
|
||||||
|
*/
|
||||||
|
public fun <R1 : R> result(outputType: KType, f: suspend ActionEnv.(T) -> R1) {
|
||||||
|
this.outputType = outputType
|
||||||
|
result = f;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculate the result of goal
|
||||||
|
*/
|
||||||
|
public inline fun <reified R1 : R> result(noinline f: suspend ActionEnv.(T) -> R1) {
|
||||||
|
outputType = typeOf<R1>()
|
||||||
|
result = f;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@PublishedApi
|
||||||
|
internal class MapAction<in T : Any, R : Any>(
|
||||||
|
outputType: KType,
|
||||||
|
private val block: MapActionBuilder<T, R>.() -> Unit,
|
||||||
|
) : AbstractAction<T, R>(outputType) {
|
||||||
|
|
||||||
|
private fun DataSetBuilder<R>.mapOne(name: Name, data: Data<T>, meta: Meta) {
|
||||||
|
// Creating a new environment for action using **old** name, old meta and task meta
|
||||||
|
val env = ActionEnv(name, data.meta, meta)
|
||||||
|
|
||||||
|
//applying transformation from builder
|
||||||
|
val builder = MapActionBuilder<T, R>(
|
||||||
|
name,
|
||||||
|
data.meta.toMutableMeta(), // using data meta
|
||||||
|
meta,
|
||||||
|
outputType
|
||||||
|
).apply(block)
|
||||||
|
|
||||||
|
//getting new name
|
||||||
|
val newName = builder.name
|
||||||
|
|
||||||
|
//getting new meta
|
||||||
|
val newMeta = builder.meta.seal()
|
||||||
|
|
||||||
|
@OptIn(DFInternal::class)
|
||||||
|
val newData = Data(builder.outputType, newMeta, dependencies = listOf(data)) {
|
||||||
|
builder.result(env, data.await())
|
||||||
|
}
|
||||||
|
//setting the data node
|
||||||
|
data(newName, newData)
|
||||||
|
}
|
||||||
|
|
||||||
|
override fun DataSetBuilder<R>.generate(data: DataSet<T>, meta: Meta) {
|
||||||
|
data.forEach { mapOne(it.name, it.data, meta) }
|
||||||
|
}
|
||||||
|
|
||||||
|
override fun DataSourceBuilder<R>.update(dataSet: DataSet<T>, meta: Meta, updateKey: Name) {
|
||||||
|
remove(updateKey)
|
||||||
|
dataSet[updateKey]?.let { mapOne(updateKey, it, meta) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A one-to-one mapping action
|
||||||
|
*/
|
||||||
|
@DFExperimental
|
||||||
|
@Suppress("FunctionName")
|
||||||
|
public inline fun <T : Any, reified R : Any> Action.Companion.map(
|
||||||
|
noinline builder: MapActionBuilder<T, R>.() -> Unit,
|
||||||
|
): Action<T, R> = MapAction(typeOf<R>(), builder)
|
||||||
|
|
||||||
|
|
@ -0,0 +1,117 @@
|
|||||||
|
package space.kscience.dataforge.actions
|
||||||
|
|
||||||
|
import space.kscience.dataforge.data.*
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.meta.MutableMeta
|
||||||
|
import space.kscience.dataforge.misc.DFBuilder
|
||||||
|
import space.kscience.dataforge.misc.DFExperimental
|
||||||
|
import space.kscience.dataforge.misc.DFInternal
|
||||||
|
import space.kscience.dataforge.names.Name
|
||||||
|
import space.kscience.dataforge.names.parseAsName
|
||||||
|
import kotlin.reflect.KType
|
||||||
|
import kotlin.reflect.typeOf
|
||||||
|
|
||||||
|
|
||||||
|
public class JoinGroup<T : Any, R : Any>(
|
||||||
|
public var name: String,
|
||||||
|
internal val set: DataSet<T>,
|
||||||
|
@PublishedApi internal var outputType: KType,
|
||||||
|
) {
|
||||||
|
|
||||||
|
public var meta: MutableMeta = MutableMeta()
|
||||||
|
|
||||||
|
public lateinit var result: suspend ActionEnv.(Map<Name, ValueWithMeta<T>>) -> R
|
||||||
|
|
||||||
|
internal fun <R1 : R> result(outputType: KType, f: suspend ActionEnv.(Map<Name, ValueWithMeta<T>>) -> R1) {
|
||||||
|
this.outputType = outputType
|
||||||
|
this.result = f;
|
||||||
|
}
|
||||||
|
|
||||||
|
public inline fun <reified R1 : R> result(noinline f: suspend ActionEnv.(Map<Name, ValueWithMeta<T>>) -> R1) {
|
||||||
|
outputType = typeOf<R1>()
|
||||||
|
this.result = f;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
@DFBuilder
|
||||||
|
public class ReduceGroupBuilder<T : Any, R : Any>(
|
||||||
|
public val actionMeta: Meta,
|
||||||
|
private val outputType: KType,
|
||||||
|
) {
|
||||||
|
private val groupRules: MutableList<(DataSet<T>) -> List<JoinGroup<T, R>>> = ArrayList();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* introduce grouping by meta value
|
||||||
|
*/
|
||||||
|
public fun byValue(tag: String, defaultTag: String = "@default", action: JoinGroup<T, R>.() -> Unit) {
|
||||||
|
groupRules += { node ->
|
||||||
|
GroupRule.byMetaValue(tag, defaultTag).gather(node).map {
|
||||||
|
JoinGroup<T, R>(it.key, it.value, outputType).apply(action)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public fun group(
|
||||||
|
groupName: String,
|
||||||
|
predicate: (Name, Meta) -> Boolean,
|
||||||
|
action: JoinGroup<T, R>.() -> Unit,
|
||||||
|
) {
|
||||||
|
groupRules += { source ->
|
||||||
|
listOf(
|
||||||
|
JoinGroup<T, R>(groupName, source.filter(predicate), outputType).apply(action)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Apply transformation to the whole node
|
||||||
|
*/
|
||||||
|
public fun result(resultName: String, f: suspend ActionEnv.(Map<Name, ValueWithMeta<T>>) -> R) {
|
||||||
|
groupRules += { node ->
|
||||||
|
listOf(JoinGroup<T, R>(resultName, node, outputType).apply { result(outputType, f) })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
internal fun buildGroups(input: DataSet<T>): List<JoinGroup<T, R>> =
|
||||||
|
groupRules.flatMap { it.invoke(input) }
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
@PublishedApi
|
||||||
|
internal class ReduceAction<T : Any, R : Any>(
|
||||||
|
outputType: KType,
|
||||||
|
private val action: ReduceGroupBuilder<T, R>.() -> Unit,
|
||||||
|
) : AbstractAction<T, R>(outputType) {
|
||||||
|
//TODO optimize reduction. Currently, the whole action recalculates on push
|
||||||
|
|
||||||
|
override fun DataSetBuilder<R>.generate(data: DataSet<T>, meta: Meta) {
|
||||||
|
ReduceGroupBuilder<T, R>(meta, outputType).apply(action).buildGroups(data).forEach { group ->
|
||||||
|
val dataFlow: Map<Name, Data<T>> = group.set.asSequence().fold(HashMap()) { acc, value ->
|
||||||
|
acc.apply {
|
||||||
|
acc[value.name] = value.data
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
val groupName: String = group.name
|
||||||
|
|
||||||
|
val groupMeta = group.meta
|
||||||
|
|
||||||
|
val env = ActionEnv(groupName.parseAsName(), groupMeta, meta)
|
||||||
|
@OptIn(DFInternal::class) val res: Data<R> = dataFlow.reduceToData(
|
||||||
|
group.outputType,
|
||||||
|
meta = groupMeta
|
||||||
|
) { group.result.invoke(env, it) }
|
||||||
|
|
||||||
|
data(env.name, res)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A one-to-one mapping action
|
||||||
|
*/
|
||||||
|
@DFExperimental
|
||||||
|
public inline fun <reified T : Any, reified R : Any> Action.Companion.reduce(
|
||||||
|
noinline builder: ReduceGroupBuilder<T, R>.() -> Unit,
|
||||||
|
): Action<T, R> = ReduceAction(typeOf<R>(), builder)
|
@ -0,0 +1,92 @@
|
|||||||
|
package space.kscience.dataforge.actions
|
||||||
|
|
||||||
|
import space.kscience.dataforge.data.*
|
||||||
|
import space.kscience.dataforge.meta.Laminate
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.meta.MutableMeta
|
||||||
|
import space.kscience.dataforge.meta.toMutableMeta
|
||||||
|
import space.kscience.dataforge.misc.DFExperimental
|
||||||
|
import space.kscience.dataforge.names.Name
|
||||||
|
import space.kscience.dataforge.names.parseAsName
|
||||||
|
import kotlin.collections.set
|
||||||
|
import kotlin.reflect.KType
|
||||||
|
import kotlin.reflect.typeOf
|
||||||
|
|
||||||
|
|
||||||
|
public class SplitBuilder<T : Any, R : Any>(public val name: Name, public val meta: Meta) {
|
||||||
|
|
||||||
|
public class FragmentRule<T : Any, R : Any>(
|
||||||
|
public val name: Name,
|
||||||
|
public var meta: MutableMeta,
|
||||||
|
@PublishedApi internal var outputType: KType,
|
||||||
|
) {
|
||||||
|
public lateinit var result: suspend (T) -> R
|
||||||
|
|
||||||
|
public inline fun <reified R1 : R> result(noinline f: suspend (T) -> R1) {
|
||||||
|
this.outputType = typeOf<R1>()
|
||||||
|
result = f;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
internal val fragments: MutableMap<Name, FragmentRule<T, R>.() -> Unit> = HashMap()
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add new fragment building rule. If the framgent not defined, result won't be available even if it is present in the map
|
||||||
|
* @param name the name of a fragment
|
||||||
|
* @param rule the rule to transform fragment name and meta using
|
||||||
|
*/
|
||||||
|
public fun fragment(name: String, rule: FragmentRule<T, R>.() -> Unit) {
|
||||||
|
fragments[name.parseAsName()] = rule
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Action that splits each incoming element into a number of fragments defined in builder
|
||||||
|
*/
|
||||||
|
@PublishedApi
|
||||||
|
internal class SplitAction<T : Any, R : Any>(
|
||||||
|
outputType: KType,
|
||||||
|
private val action: SplitBuilder<T, R>.() -> Unit,
|
||||||
|
) : AbstractAction<T, R>(outputType) {
|
||||||
|
|
||||||
|
private fun DataSetBuilder<R>.splitOne(name: Name, data: Data<T>, meta: Meta) {
|
||||||
|
val laminate = Laminate(data.meta, meta)
|
||||||
|
|
||||||
|
val split = SplitBuilder<T, R>(name, data.meta).apply(action)
|
||||||
|
|
||||||
|
|
||||||
|
// apply individual fragment rules to result
|
||||||
|
split.fragments.forEach { (fragmentName, rule) ->
|
||||||
|
val env = SplitBuilder.FragmentRule<T, R>(
|
||||||
|
fragmentName,
|
||||||
|
laminate.toMutableMeta(),
|
||||||
|
outputType
|
||||||
|
).apply(rule)
|
||||||
|
//data.map<R>(outputType, meta = env.meta) { env.result(it) }.named(fragmentName)
|
||||||
|
|
||||||
|
data(
|
||||||
|
fragmentName,
|
||||||
|
@Suppress("OPT_IN_USAGE") Data(outputType, meta = env.meta, dependencies = listOf(data)) {
|
||||||
|
env.result(data.await())
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
override fun DataSetBuilder<R>.generate(data: DataSet<T>, meta: Meta) {
|
||||||
|
data.forEach { splitOne(it.name, it.data, meta) }
|
||||||
|
}
|
||||||
|
|
||||||
|
override fun DataSourceBuilder<R>.update(dataSet: DataSet<T>, meta: Meta, updateKey: Name) {
|
||||||
|
remove(updateKey)
|
||||||
|
dataSet[updateKey]?.let { splitOne(updateKey, it, meta) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Action that splits each incoming element into a number of fragments defined in builder
|
||||||
|
*/
|
||||||
|
@DFExperimental
|
||||||
|
public inline fun <T : Any, reified R : Any> Action.Companion.split(
|
||||||
|
noinline builder: SplitBuilder<T, R>.() -> Unit,
|
||||||
|
): Action<T, R> = SplitAction(typeOf<R>(), builder)
|
@ -1,12 +1,11 @@
|
|||||||
package hep.dataforge.data
|
package space.kscience.dataforge.data
|
||||||
|
|
||||||
import hep.dataforge.meta.DFExperimental
|
|
||||||
import kotlinx.coroutines.CoroutineScope
|
import kotlinx.coroutines.CoroutineScope
|
||||||
import kotlinx.coroutines.Job
|
import kotlinx.coroutines.Job
|
||||||
|
import space.kscience.dataforge.misc.DFExperimental
|
||||||
import kotlin.coroutines.CoroutineContext
|
import kotlin.coroutines.CoroutineContext
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* A monitor of goal state that could be accessed only form inside the goal
|
* A monitor of goal state that could be accessed only form inside the goal
|
||||||
*/
|
*/
|
||||||
@ -49,9 +48,9 @@ public val CoroutineScope.monitor: CoroutineMonitor? get() = coroutineContext.mo
|
|||||||
public val Job.dependencies: Collection<Job> get() = this[Dependencies]?.values ?: emptyList()
|
public val Job.dependencies: Collection<Job> get() = this[Dependencies]?.values ?: emptyList()
|
||||||
|
|
||||||
@DFExperimental
|
@DFExperimental
|
||||||
public val Job.totalWork: Double get() = dependencies.sumByDouble { totalWork } + (monitor?.totalWork ?: 0.0)
|
public val Job.totalWork: Double get() = dependencies.sumOf { totalWork } + (monitor?.totalWork ?: 0.0)
|
||||||
@DFExperimental
|
@DFExperimental
|
||||||
public val Job.workDone: Double get() = dependencies.sumByDouble { workDone } + (monitor?.workDone ?: 0.0)
|
public val Job.workDone: Double get() = dependencies.sumOf { workDone } + (monitor?.workDone ?: 0.0)
|
||||||
@DFExperimental
|
@DFExperimental
|
||||||
public val Job.status: String get() = monitor?.status ?: ""
|
public val Job.status: String get() = monitor?.status ?: ""
|
||||||
@DFExperimental
|
@DFExperimental
|
@ -0,0 +1,107 @@
|
|||||||
|
package space.kscience.dataforge.data
|
||||||
|
|
||||||
|
import kotlinx.coroutines.*
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.meta.MetaRepr
|
||||||
|
import space.kscience.dataforge.meta.isEmpty
|
||||||
|
import space.kscience.dataforge.misc.DFInternal
|
||||||
|
import space.kscience.dataforge.misc.DfId
|
||||||
|
import kotlin.coroutines.CoroutineContext
|
||||||
|
import kotlin.coroutines.EmptyCoroutineContext
|
||||||
|
import kotlin.reflect.KType
|
||||||
|
import kotlin.reflect.typeOf
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A data element characterized by its meta
|
||||||
|
*/
|
||||||
|
@DfId(Data.TYPE)
|
||||||
|
public interface Data<out T> : Goal<T>, MetaRepr {
|
||||||
|
/**
|
||||||
|
* Type marker for the data. The type is known before the calculation takes place so it could be checked.
|
||||||
|
*/
|
||||||
|
public val type: KType
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Meta for the data
|
||||||
|
*/
|
||||||
|
public val meta: Meta
|
||||||
|
|
||||||
|
override fun toMeta(): Meta = Meta {
|
||||||
|
"type" put (type.toString())
|
||||||
|
if (!meta.isEmpty()) {
|
||||||
|
"meta" put meta
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public companion object {
|
||||||
|
public const val TYPE: String = "data"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The type that can't have any subtypes
|
||||||
|
*/
|
||||||
|
internal val TYPE_OF_NOTHING: KType = typeOf<Unit>()
|
||||||
|
|
||||||
|
public inline fun <reified T : Any> static(
|
||||||
|
value: T,
|
||||||
|
meta: Meta = Meta.EMPTY,
|
||||||
|
): Data<T> = StaticData(typeOf<T>(), value, meta)
|
||||||
|
|
||||||
|
/**
|
||||||
|
* An empty data containing only meta
|
||||||
|
*/
|
||||||
|
@OptIn(DelicateCoroutinesApi::class)
|
||||||
|
public fun empty(meta: Meta): Data<Nothing> = object : Data<Nothing> {
|
||||||
|
override val type: KType = TYPE_OF_NOTHING
|
||||||
|
override val meta: Meta = meta
|
||||||
|
override val dependencies: Collection<Goal<*>> = emptyList()
|
||||||
|
override val deferred: Deferred<Nothing>
|
||||||
|
get() = GlobalScope.async(start = CoroutineStart.LAZY) {
|
||||||
|
error("The Data is empty and could not be computed")
|
||||||
|
}
|
||||||
|
|
||||||
|
override fun async(coroutineScope: CoroutineScope): Deferred<Nothing> = deferred
|
||||||
|
override fun reset() {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A lazily computed variant of [Data] based on [LazyGoal]
|
||||||
|
* One must ensure that proper [type] is used so this method should not be used
|
||||||
|
*/
|
||||||
|
private class LazyData<T : Any>(
|
||||||
|
override val type: KType,
|
||||||
|
override val meta: Meta = Meta.EMPTY,
|
||||||
|
additionalContext: CoroutineContext = EmptyCoroutineContext,
|
||||||
|
dependencies: Collection<Goal<*>> = emptyList(),
|
||||||
|
block: suspend () -> T,
|
||||||
|
) : Data<T>, LazyGoal<T>(additionalContext, dependencies, block)
|
||||||
|
|
||||||
|
public class StaticData<T : Any>(
|
||||||
|
override val type: KType,
|
||||||
|
value: T,
|
||||||
|
override val meta: Meta = Meta.EMPTY,
|
||||||
|
) : Data<T>, StaticGoal<T>(value)
|
||||||
|
|
||||||
|
@Suppress("FunctionName")
|
||||||
|
public inline fun <reified T : Any> Data(value: T, meta: Meta = Meta.EMPTY): StaticData<T> =
|
||||||
|
StaticData(typeOf<T>(), value, meta)
|
||||||
|
|
||||||
|
@Suppress("FunctionName")
|
||||||
|
@DFInternal
|
||||||
|
public fun <T : Any> Data(
|
||||||
|
type: KType,
|
||||||
|
meta: Meta = Meta.EMPTY,
|
||||||
|
context: CoroutineContext = EmptyCoroutineContext,
|
||||||
|
dependencies: Collection<Goal<*>> = emptyList(),
|
||||||
|
block: suspend () -> T,
|
||||||
|
): Data<T> = LazyData(type, meta, context, dependencies, block)
|
||||||
|
|
||||||
|
@OptIn(DFInternal::class)
|
||||||
|
@Suppress("FunctionName")
|
||||||
|
public inline fun <reified T : Any> Data(
|
||||||
|
meta: Meta = Meta.EMPTY,
|
||||||
|
context: CoroutineContext = EmptyCoroutineContext,
|
||||||
|
dependencies: Collection<Goal<*>> = emptyList(),
|
||||||
|
noinline block: suspend () -> T,
|
||||||
|
): Data<T> = Data(typeOf<T>(), meta, context, dependencies, block)
|
@ -0,0 +1,124 @@
|
|||||||
|
package space.kscience.dataforge.data
|
||||||
|
|
||||||
|
import kotlinx.coroutines.*
|
||||||
|
import kotlinx.coroutines.flow.Flow
|
||||||
|
import kotlinx.coroutines.flow.emptyFlow
|
||||||
|
import kotlinx.coroutines.flow.mapNotNull
|
||||||
|
import space.kscience.dataforge.data.Data.Companion.TYPE_OF_NOTHING
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.names.Name
|
||||||
|
import space.kscience.dataforge.names.asName
|
||||||
|
import space.kscience.dataforge.names.endsWith
|
||||||
|
import space.kscience.dataforge.names.parseAsName
|
||||||
|
import kotlin.reflect.KType
|
||||||
|
|
||||||
|
public interface DataSet<out T : Any> {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The minimal common ancestor to all data in the node
|
||||||
|
*/
|
||||||
|
public val dataType: KType
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Meta-data associated with this node. If no meta is provided, returns [Meta.EMPTY].
|
||||||
|
*/
|
||||||
|
public val meta: Meta
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Traverse this [DataSet] returning named data instances. The order is not guaranteed.
|
||||||
|
*/
|
||||||
|
public operator fun iterator(): Iterator<NamedData<T>>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get data with given name.
|
||||||
|
*/
|
||||||
|
public operator fun get(name: Name): Data<T>?
|
||||||
|
|
||||||
|
public companion object {
|
||||||
|
public val META_KEY: Name = "@meta".asName()
|
||||||
|
|
||||||
|
/**
|
||||||
|
* An empty [DataSet] that suits all types
|
||||||
|
*/
|
||||||
|
public val EMPTY: DataSet<Nothing> = object : DataSet<Nothing> {
|
||||||
|
override val dataType: KType = TYPE_OF_NOTHING
|
||||||
|
override val meta: Meta get() = Meta.EMPTY
|
||||||
|
|
||||||
|
override fun iterator(): Iterator<NamedData<Nothing>> = emptySequence<NamedData<Nothing>>().iterator()
|
||||||
|
|
||||||
|
override fun get(name: Name): Data<Nothing>? = null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public fun <T : Any> DataSet<T>.asSequence(): Sequence<NamedData<T>> = object : Sequence<NamedData<T>> {
|
||||||
|
override fun iterator(): Iterator<NamedData<T>> = this@asSequence.iterator()
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return a single [Data] in this [DataSet]. Throw error if it is not single.
|
||||||
|
*/
|
||||||
|
public fun <T : Any> DataSet<T>.single(): NamedData<T> = asSequence().single()
|
||||||
|
|
||||||
|
public fun <T : Any> DataSet<T>.asIterable(): Iterable<NamedData<T>> = object : Iterable<NamedData<T>> {
|
||||||
|
override fun iterator(): Iterator<NamedData<T>> = this@asIterable.iterator()
|
||||||
|
}
|
||||||
|
|
||||||
|
public operator fun <T : Any> DataSet<T>.get(name: String): Data<T>? = get(name.parseAsName())
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A [DataSet] with propagated updates.
|
||||||
|
*/
|
||||||
|
public interface DataSource<out T : Any> : DataSet<T>, CoroutineScope {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A flow of updated item names. Updates are propagated in a form of [Flow] of names of updated nodes.
|
||||||
|
* Those can include new data items and replacement of existing ones. The replaced items could update existing data content
|
||||||
|
* and replace it completely, so they should be pulled again.
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
public val updates: Flow<Name>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stop generating updates from this [DataSource]
|
||||||
|
*/
|
||||||
|
public fun close() {
|
||||||
|
coroutineContext[Job]?.cancel()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public val <T : Any> DataSet<T>.updates: Flow<Name> get() = if (this is DataSource) updates else emptyFlow()
|
||||||
|
//
|
||||||
|
///**
|
||||||
|
// * Flow all data nodes with names starting with [branchName]
|
||||||
|
// */
|
||||||
|
//public fun <T : Any> DataSet<T>.children(branchName: Name): Sequence<NamedData<T>> =
|
||||||
|
// this@children.asSequence().filter {
|
||||||
|
// it.name.startsWith(branchName)
|
||||||
|
// }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start computation for all goals in data node and return a job for the whole node
|
||||||
|
*/
|
||||||
|
public fun <T : Any> DataSet<T>.startAll(coroutineScope: CoroutineScope): Job = coroutineScope.launch {
|
||||||
|
asIterable().map {
|
||||||
|
it.launch(this@launch)
|
||||||
|
}.joinAll()
|
||||||
|
}
|
||||||
|
|
||||||
|
public suspend fun <T : Any> DataSet<T>.computeAndJoinAll(): Unit = coroutineScope { startAll(this).join() }
|
||||||
|
|
||||||
|
public fun DataSet<*>.toMeta(): Meta = Meta {
|
||||||
|
forEach {
|
||||||
|
if (it.name.endsWith(DataSet.META_KEY)) {
|
||||||
|
set(it.name, it.meta)
|
||||||
|
} else {
|
||||||
|
it.name put {
|
||||||
|
"type" put it.type.toString()
|
||||||
|
"meta" put it.meta
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public val <T : Any> DataSet<T>.updatesWithData: Flow<NamedData<T>> get() = updates.mapNotNull { get(it)?.named(it) }
|
@ -0,0 +1,165 @@
|
|||||||
|
package space.kscience.dataforge.data
|
||||||
|
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.meta.MutableMeta
|
||||||
|
import space.kscience.dataforge.misc.DFExperimental
|
||||||
|
import space.kscience.dataforge.names.Name
|
||||||
|
import space.kscience.dataforge.names.isEmpty
|
||||||
|
import space.kscience.dataforge.names.plus
|
||||||
|
import kotlin.reflect.KType
|
||||||
|
|
||||||
|
public interface DataSetBuilder<in T : Any> {
|
||||||
|
public val dataType: KType
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove all data items starting with [name]
|
||||||
|
*/
|
||||||
|
public fun remove(name: Name)
|
||||||
|
|
||||||
|
public fun data(name: Name, data: Data<T>?)
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set a current state of given [dataSet] into a branch [name]. Does not propagate updates
|
||||||
|
*/
|
||||||
|
public fun node(name: Name, dataSet: DataSet<T>) {
|
||||||
|
//remove previous items
|
||||||
|
if (name != Name.EMPTY) {
|
||||||
|
remove(name)
|
||||||
|
}
|
||||||
|
|
||||||
|
//Set new items
|
||||||
|
dataSet.forEach {
|
||||||
|
data(name + it.name, it.data)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set meta for the given node
|
||||||
|
*/
|
||||||
|
public fun meta(name: Name, meta: Meta)
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Define meta in this [DataSet]
|
||||||
|
*/
|
||||||
|
public fun <T : Any> DataSetBuilder<T>.meta(value: Meta): Unit = meta(Name.EMPTY, value)
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Define meta in this [DataSet]
|
||||||
|
*/
|
||||||
|
public fun <T : Any> DataSetBuilder<T>.meta(mutableMeta: MutableMeta.() -> Unit): Unit = meta(Meta(mutableMeta))
|
||||||
|
|
||||||
|
/**
 * A view over [parent] that prefixes every addressed name with [branch].
 */
@PublishedApi
internal class SubSetBuilder<in T : Any>(
    private val parent: DataSetBuilder<T>,
    private val branch: Name,
) : DataSetBuilder<T> {

    override val dataType: KType get() = parent.dataType

    override fun remove(name: Name): Unit = parent.remove(branch + name)

    override fun data(name: Name, data: Data<T>?): Unit = parent.data(branch + name, data)

    override fun node(name: Name, dataSet: DataSet<T>): Unit = parent.node(branch + name, dataSet)

    override fun meta(name: Name, meta: Meta): Unit = parent.meta(branch + name, meta)
}
|
||||||
|
|
||||||
|
/**
 * Run [block] against the subtree of this builder rooted at [name].
 * For an empty name the block is applied to this builder directly.
 */
public inline fun <T : Any> DataSetBuilder<T>.node(
    name: Name,
    crossinline block: DataSetBuilder<T>.() -> Unit,
) {
    if (name.isEmpty()) block() else SubSetBuilder(this, name).block()
}

/** Put a single [value] under a string [name] (parsed into a [Name]). */
public fun <T : Any> DataSetBuilder<T>.data(name: String, value: Data<T>) {
    data(Name.parse(name), value)
}

/** Put the snapshot of [set] under a string [name] (parsed into a [Name]). */
public fun <T : Any> DataSetBuilder<T>.node(name: String, set: DataSet<T>) {
    node(Name.parse(name), set)
}

/** Run [block] against the subtree addressed by a string [name]. */
public inline fun <T : Any> DataSetBuilder<T>.node(
    name: String,
    crossinline block: DataSetBuilder<T>.() -> Unit,
): Unit = node(Name.parse(name), block)

/** Put a [NamedData] item using its own name. */
public fun <T : Any> DataSetBuilder<T>.set(value: NamedData<T>) {
    data(value.name, value.data)
}
|
||||||
|
|
||||||
|
/**
 * Produce lazy [Data] and emit it into the [DataSetBuilder]
 */
public inline fun <reified T : Any> DataSetBuilder<T>.produce(
    name: String,
    meta: Meta = Meta.EMPTY,
    noinline producer: suspend () -> T,
) {
    data(name, Data(meta, block = producer))
}

/**
 * Produce lazy [Data] and emit it into the [DataSetBuilder]
 */
public inline fun <reified T : Any> DataSetBuilder<T>.produce(
    name: Name,
    meta: Meta = Meta.EMPTY,
    noinline producer: suspend () -> T,
) {
    data(name, Data(meta, block = producer))
}

/**
 * Emit a static data with the fixed value
 */
public inline fun <reified T : Any> DataSetBuilder<T>.static(
    name: String,
    data: T,
    meta: Meta = Meta.EMPTY,
): Unit = data(name, Data.static(data, meta))

/**
 * Emit a static data with the fixed value
 */
public inline fun <reified T : Any> DataSetBuilder<T>.static(
    name: Name,
    data: T,
    meta: Meta = Meta.EMPTY,
): Unit = data(name, Data.static(data, meta))

/**
 * Emit a static data with the fixed value and meta built by [mutableMeta].
 */
public inline fun <reified T : Any> DataSetBuilder<T>.static(
    name: String,
    data: T,
    mutableMeta: MutableMeta.() -> Unit,
): Unit = data(Name.parse(name), Data.static(data, Meta(mutableMeta)))
|
||||||
|
|
||||||
|
/**
 * Update data with given node data and meta with node meta.
 */
@DFExperimental
public fun <T : Any> DataSetBuilder<T>.populateFrom(tree: DataSet<T>) {
    //TODO check if the place is occupied
    for (item in tree) {
        data(item.name, item.data)
    }
}

//public fun <T : Any> DataSetBuilder<T>.populateFrom(flow: Flow<NamedData<T>>) {
//    flow.collect {
//        data(it.name, it.data)
//    }
//}

/** Copy every element of [sequence] into this builder under its own name. */
public fun <T : Any> DataSetBuilder<T>.populateFrom(sequence: Sequence<NamedData<T>>) {
    sequence.forEach { item ->
        data(item.name, item.data)
    }
}
|
@ -0,0 +1,119 @@
|
|||||||
|
package space.kscience.dataforge.data
|
||||||
|
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.misc.DFInternal
|
||||||
|
import space.kscience.dataforge.misc.DfId
|
||||||
|
import space.kscience.dataforge.names.*
|
||||||
|
import kotlin.collections.component1
|
||||||
|
import kotlin.collections.component2
|
||||||
|
import kotlin.reflect.KType
|
||||||
|
import kotlin.reflect.typeOf
|
||||||
|
|
||||||
|
/**
 * A single child of a [DataTree]: either a nested [Node] or a terminal [Leaf].
 */
public sealed class DataTreeItem<out T : Any> {

    /** Meta attached to this item (tree meta for nodes, data meta for leaves). */
    public abstract val meta: Meta

    public class Node<out T : Any>(public val tree: DataTree<T>) : DataTreeItem<T>() {
        override val meta: Meta get() = tree.meta
    }

    public class Leaf<out T : Any>(public val data: Data<T>) : DataTreeItem<T>() {
        override val meta: Meta get() = data.meta
    }
}

/** The declared data type of the item's content. */
public val <T : Any> DataTreeItem<T>.type: KType
    get() = when (this) {
        is DataTreeItem.Node -> tree.dataType
        is DataTreeItem.Leaf -> data.type
    }
|
||||||
|
|
||||||
|
/**
 * A tree-like [DataSet] grouped into the node. All data inside the node must inherit its type
 */
@DfId(DataTree.TYPE)
public interface DataTree<out T : Any> : DataSet<T> {

    /**
     * Top-level children items of this [DataTree]
     */
    public val items: Map<NameToken, DataTreeItem<T>>

    // Node meta lives in a dedicated service item; a missing item means empty meta.
    override val meta: Meta get() = items[META_ITEM_NAME_TOKEN]?.meta ?: Meta.EMPTY

    override fun iterator(): Iterator<NamedData<T>> = iterator {
        for ((token, childItem) in items) {
            // Tokens starting with '@' designate service items and are not exposed as data.
            if (token.body.startsWith("@")) continue
            when (childItem) {
                is DataTreeItem.Leaf -> yield(childItem.data.named(token.asName()))
                is DataTreeItem.Node -> yieldAll(
                    childItem.tree.asSequence().map { it.named(token + it.name) }
                )
            }
        }
    }

    override fun get(name: Name): Data<T>? = when (name.length) {
        0 -> null // the root is a node, not a data item
        1 -> items[name.firstOrNull()!!].data
        else -> items[name.firstOrNull()!!].tree?.get(name.cutFirst())
    }

    public companion object {
        public const val TYPE: String = "dataTree"

        /**
         * A name token used to designate tree node meta
         */
        public val META_ITEM_NAME_TOKEN: NameToken = NameToken("@meta")

        /** An empty tree of the given explicit [type]. */
        @DFInternal
        public fun <T : Any> emptyWithType(type: KType, meta: Meta = Meta.EMPTY): DataTree<T> =
            object : DataTree<T> {
                override val items: Map<NameToken, DataTreeItem<T>> get() = emptyMap()
                override val dataType: KType get() = type
                override val meta: Meta get() = meta
            }

        /** An empty tree with the type inferred from [T]. */
        @OptIn(DFInternal::class)
        public inline fun <reified T : Any> empty(meta: Meta = Meta.EMPTY): DataTree<T> =
            emptyWithType<T>(typeOf<T>(), meta)
    }
}
|
||||||
|
|
||||||
|
/** List fully-qualified names of the direct children of the node at [prefix]. */
public fun <T : Any> DataTree<T>.listChildren(prefix: Name): List<Name> =
    getItem(prefix).tree?.items?.keys?.map { prefix + it } ?: emptyList()

/**
 * Get a [DataTreeItem] with given [name] or null if the item does not exist
 */
public tailrec fun <T : Any> DataTree<T>.getItem(name: Name): DataTreeItem<T>? = when (name.length) {
    0 -> DataTreeItem.Node(this) // the empty name addresses this node itself
    1 -> items[name.firstOrNull()]
    else -> items[name.firstOrNull()!!].tree?.getItem(name.cutFirst())
}

/** This item viewed as a subtree, or null if it is not a node. */
public val <T : Any> DataTreeItem<T>?.tree: DataTree<T>? get() = (this as? DataTreeItem.Node<T>)?.tree

/** This item's data, or null if it is not a leaf. */
public val <T : Any> DataTreeItem<T>?.data: Data<T>? get() = (this as? DataTreeItem.Leaf<T>)?.data
|
||||||
|
|
||||||
|
/**
 * A [Sequence] of all children including nodes
 */
public fun <T : Any> DataTree<T>.traverseItems(): Sequence<Pair<Name, DataTreeItem<T>>> = sequence {
    for ((head, item) in items) {
        yield(head.asName() to item)
        if (item is DataTreeItem.Node) {
            // Recurse into the subtree, re-rooting child names under this token.
            yieldAll(
                item.tree.traverseItems().map { (childName, childItem) ->
                    (head.asName() + childName) to childItem
                }
            )
        }
    }
}

/**
 * Get a branch of this [DataTree] with a given [branchName].
 * The difference from similar method for [DataSet] is that internal logic is more simple and the return value is a [DataTree]
 */
@OptIn(DFInternal::class)
public fun <T : Any> DataTree<T>.branch(branchName: Name): DataTree<T> =
    getItem(branchName)?.tree ?: DataTree.emptyWithType(dataType)

/** [branch] overload that parses [branchName] from a string. */
public fun <T : Any> DataTree<T>.branch(branchName: String): DataTree<T> = branch(branchName.parseAsName())
|
@ -0,0 +1,127 @@
|
|||||||
|
package space.kscience.dataforge.data
|
||||||
|
|
||||||
|
import kotlinx.coroutines.CoroutineScope
|
||||||
|
import kotlinx.coroutines.Job
|
||||||
|
import kotlinx.coroutines.flow.MutableSharedFlow
|
||||||
|
import kotlinx.coroutines.launch
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.misc.DFInternal
|
||||||
|
import space.kscience.dataforge.misc.ThreadSafe
|
||||||
|
import space.kscience.dataforge.names.*
|
||||||
|
import kotlin.collections.set
|
||||||
|
import kotlin.coroutines.CoroutineContext
|
||||||
|
import kotlin.coroutines.coroutineContext
|
||||||
|
import kotlin.reflect.KType
|
||||||
|
import kotlin.reflect.typeOf
|
||||||
|
|
||||||
|
/**
 * A [DataSetBuilder] that is also a live [DataSource]: mutations are broadcast via [updates].
 */
public interface DataSourceBuilder<T : Any> : DataSetBuilder<T>, DataSource<T> {
    override val updates: MutableSharedFlow<Name>
}
|
||||||
|
|
||||||
|
/**
 * A mutable [DataTree] that propagates updates
 */
public class DataTreeBuilder<T : Any>(
    override val dataType: KType,
    coroutineContext: CoroutineContext,
) : DataTree<T>, DataSourceBuilder<T> {

    // Own child job so this builder can be cancelled independently of the parent context.
    override val coroutineContext: CoroutineContext =
        coroutineContext + Job(coroutineContext[Job]) + GoalExecutionRestriction()

    private val treeItems = HashMap<NameToken, DataTreeItem<T>>()

    // Service items (tokens starting with '@', e.g. node meta) are hidden from the public view.
    override val items: Map<NameToken, DataTreeItem<T>>
        get() = treeItems.filter { !it.key.body.startsWith("@") }

    override val updates: MutableSharedFlow<Name> = MutableSharedFlow<Name>()

    @ThreadSafe
    private fun remove(token: NameToken) {
        if (treeItems.remove(token) != null) {
            launch {
                updates.emit(token.asName())
            }
        }
    }

    override fun remove(name: Name) {
        if (name.isEmpty()) error("Can't remove the root node")
        (getItem(name.cutLast()).tree as? DataTreeBuilder)?.remove(name.lastOrNull()!!)
    }

    @ThreadSafe
    private fun set(token: NameToken, data: Data<T>) {
        treeItems[token] = DataTreeItem.Leaf(data)
    }

    @ThreadSafe
    private fun set(token: NameToken, node: DataTree<T>) {
        treeItems[token] = DataTreeItem.Node(node)
    }

    private fun getOrCreateNode(token: NameToken): DataTreeBuilder<T> =
        (treeItems[token] as? DataTreeItem.Node<T>)?.tree as? DataTreeBuilder<T>
            ?: DataTreeBuilder<T>(dataType, coroutineContext).also { set(token, it) }

    private fun getOrCreateNode(name: Name): DataTreeBuilder<T> = when (name.length) {
        0 -> this
        1 -> getOrCreateNode(name.firstOrNull()!!)
        else -> getOrCreateNode(name.firstOrNull()!!).getOrCreateNode(name.cutFirst())
    }

    override fun data(name: Name, data: Data<T>?) {
        if (data == null) {
            remove(name)
        } else {
            when (name.length) {
                0 -> error("Can't add data with empty name")
                1 -> set(name.firstOrNull()!!, data)
                // FIX: was `2 ->`, which silently dropped data for names deeper than two tokens
                // (while still emitting an update signal). `getOrCreateNode` handles arbitrary depth.
                else -> getOrCreateNode(name.cutLast()).set(name.lastOrNull()!!, data)
            }
        }
        launch {
            updates.emit(name)
        }
    }

    override fun meta(name: Name, meta: Meta) {
        val item = getItem(name)
        if (item is DataTreeItem.Leaf) error("TODO: Can't change meta of existing leaf item.")
        data(name + DataTree.META_ITEM_NAME_TOKEN, Data.empty(meta))
    }
}
|
||||||
|
|
||||||
|
/**
 * Create a dynamic [DataSource]. Initial data is placed synchronously.
 */
@DFInternal
@Suppress("FunctionName")
public fun <T : Any> DataSource(
    type: KType,
    parent: CoroutineScope,
    block: DataSourceBuilder<T>.() -> Unit,
): DataTreeBuilder<T> = DataTreeBuilder<T>(type, parent.coroutineContext).apply(block)

/** Reified-type overload of [DataSource]. */
@Suppress("OPT_IN_USAGE", "FunctionName")
public inline fun <reified T : Any> DataSource(
    parent: CoroutineScope,
    crossinline block: DataSourceBuilder<T>.() -> Unit,
): DataTreeBuilder<T> = DataSource(typeOf<T>(), parent) { block() }

/** Create a [DataSource] bound to the current coroutine context. */
@Suppress("FunctionName")
public suspend inline fun <reified T : Any> DataSource(
    crossinline block: DataSourceBuilder<T>.() -> Unit = {},
): DataTreeBuilder<T> = DataTreeBuilder<T>(typeOf<T>(), coroutineContext).apply { block() }

/** Build a dynamic child node at [name], backed by the [parent] scope. */
public inline fun <reified T : Any> DataSourceBuilder<T>.emit(
    name: Name,
    parent: CoroutineScope,
    noinline block: DataSourceBuilder<T>.() -> Unit,
): Unit = node(name, DataSource(parent, block))

/** [emit] overload that parses [name] from a string. */
public inline fun <reified T : Any> DataSourceBuilder<T>.emit(
    name: String,
    parent: CoroutineScope,
    noinline block: DataSourceBuilder<T>.() -> Unit,
): Unit = node(Name.parse(name), DataSource(parent, block))
|
@ -0,0 +1,112 @@
|
|||||||
|
package space.kscience.dataforge.data
|
||||||
|
|
||||||
|
import kotlinx.coroutines.*
|
||||||
|
import space.kscience.dataforge.misc.DFExperimental
|
||||||
|
import kotlin.coroutines.CoroutineContext
|
||||||
|
import kotlin.coroutines.EmptyCoroutineContext
|
||||||
|
|
||||||
|
/**
 * Lazy computation result with its dependencies, allowing dependency computation to start ahead of time
 */
public interface Goal<out T> {
    public val dependencies: Collection<Goal<*>>

    /**
     * Returns current running coroutine if the goal is started. Null if the computation is not started.
     */
    public val deferred: Deferred<T>?

    /**
     * Get ongoing computation or start a new one.
     * Does not guarantee thread safety. In case of multi-thread access, could create orphan computations.
     *
     * If the computation is already running, the scope is not used.
     */
    public fun async(coroutineScope: CoroutineScope): Deferred<T>

    /**
     * Reset the computation
     */
    public fun reset()

    public companion object
}

/** Start the goal in [coroutineScope] without awaiting the result. */
public fun Goal<*>.launch(coroutineScope: CoroutineScope): Job = async(coroutineScope)

/** Start (if necessary) and await the goal's result in a child scope. */
public suspend fun <T> Goal<T>.await(): T = coroutineScope { async(this).await() }

/** True when the computation was started and has completed. */
public val Goal<*>.isComplete: Boolean get() = deferred?.isCompleted ?: false
|
||||||
|
|
||||||
|
/** A [Goal] wrapping an already-computed [value]; [async] never schedules any work. */
public open class StaticGoal<T>(public val value: T) : Goal<T> {
    override val dependencies: Collection<Goal<*>> get() = emptyList()
    override val deferred: Deferred<T> = CompletableDeferred(value)

    override fun async(coroutineScope: CoroutineScope): Deferred<T> = deferred

    override fun reset() {
        // A static value cannot be recomputed, so reset is a no-op.
    }
}
|
||||||
|
|
||||||
|
/**
 * A [Goal] that lazily computes [block] on the first [async] call and caches the resulting [Deferred].
 *
 * @param coroutineContext additional context information
 * @param dependencies goals whose results this computation depends on
 * @param block the computation itself; started inside the supplied scope
 */
public open class LazyGoal<T>(
    private val coroutineContext: CoroutineContext = EmptyCoroutineContext,
    override val dependencies: Collection<Goal<*>> = emptyList(),
    public val block: suspend () -> T,
) : Goal<T> {

    // Cached running computation; null until async() is first called or after reset().
    final override var deferred: Deferred<T>? = null
        private set

    /**
     * Get ongoing computation or start a new one.
     * Does not guarantee thread safety. In case of multi-thread access, could create orphan computations.
     * If [GoalExecutionRestriction] is present in the [coroutineScope] context, the call could produce an error or a warning
     * depending on the settings.
     */
    @OptIn(DFExperimental::class)
    override fun async(coroutineScope: CoroutineScope): Deferred<T> {
        val log = coroutineScope.coroutineContext[GoalLogger]
        // Check if context restricts goal computation
        coroutineScope.coroutineContext[GoalExecutionRestriction]?.let { restriction ->
            when (restriction.policy) {
                GoalExecutionRestrictionPolicy.WARNING -> log?.emit(GoalLogger.WARNING_TAG) { "Goal eager execution is prohibited by the coroutine scope policy" }
                GoalExecutionRestrictionPolicy.ERROR -> error("Goal eager execution is prohibited by the coroutine scope policy")
                else -> {
                    /*do nothing*/
                }
            }
        }

        // NOTE(review): dependency goals are (re)started even when this goal already holds a cached
        // deferred — confirm this eager dependency start is intentional before reordering.
        log?.emit { "Starting dependencies computation for ${this@LazyGoal}" }
        val startedDependencies = this.dependencies.map { goal ->
            goal.run { async(coroutineScope) }
        }
        return deferred ?: coroutineScope.async(
            coroutineContext
                    + CoroutineMonitor()
                    + Dependencies(startedDependencies)
                    + GoalExecutionRestriction(GoalExecutionRestrictionPolicy.NONE) // Remove restrictions on goal execution
        ) {
            //cancel execution if error encountered in one of dependencies
            startedDependencies.forEach { deferred ->
                deferred.invokeOnCompletion { error ->
                    if (error != null) this.cancel(CancellationException("Dependency $deferred failed with error: ${error.message}"))
                }
            }
            coroutineContext[GoalLogger]?.emit { "Starting computation of ${this@LazyGoal}" }
            block()
        }.also { deferred = it }
    }

    /**
     * Reset the computation
     */
    override fun reset() {
        deferred?.cancel()
        deferred = null
    }
}
|
@ -0,0 +1,31 @@
|
|||||||
|
package space.kscience.dataforge.data
|
||||||
|
|
||||||
|
import kotlin.coroutines.CoroutineContext
|
||||||
|
|
||||||
|
/** Reaction policy for eager [Goal] execution, carried by [GoalExecutionRestriction]. */
public enum class GoalExecutionRestrictionPolicy {
    /**
     * Allow eager execution
     */
    NONE,

    /**
     * Give warning on eager execution
     */
    WARNING,

    /**
     * Throw error on eager execution
     */
    ERROR
}

/**
 * A special coroutine context key that allows or disallows goal execution during configuration time (eager execution).
 */
public class GoalExecutionRestriction(
    public val policy: GoalExecutionRestrictionPolicy = GoalExecutionRestrictionPolicy.ERROR,
) : CoroutineContext.Element {
    override val key: CoroutineContext.Key<*> get() = Companion

    public companion object : CoroutineContext.Key<GoalExecutionRestriction>
}
|
@ -0,0 +1,16 @@
|
|||||||
|
package space.kscience.dataforge.data
|
||||||
|
|
||||||
|
import kotlin.coroutines.CoroutineContext
|
||||||
|
|
||||||
|
/**
 * Coroutine context element that provides logging capabilities
 */
public interface GoalLogger : CoroutineContext.Element {
    override val key: CoroutineContext.Key<*> get() = GoalLogger

    /** Emit a lazily-built log [message] marked with optional [tags]. */
    public fun emit(vararg tags: String, message: suspend () -> String)

    public companion object : CoroutineContext.Key<GoalLogger> {
        public const val WARNING_TAG: String = "WARNING"
    }
}
|
@ -0,0 +1,78 @@
|
|||||||
|
/*
|
||||||
|
* Copyright 2015 Alexander Nozik.
|
||||||
|
*
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
package space.kscience.dataforge.data
|
||||||
|
|
||||||
|
import kotlinx.coroutines.launch
|
||||||
|
import space.kscience.dataforge.meta.get
|
||||||
|
import space.kscience.dataforge.meta.string
|
||||||
|
import space.kscience.dataforge.misc.DFInternal
|
||||||
|
|
||||||
|
/** A rule for partitioning a [DataSet] into named groups. */
public interface GroupRule {
    // Partition [set] into groups keyed by a string tag.
    public fun <T : Any> gather(set: DataSet<T>): Map<String, DataSet<T>>

    public companion object {
        /**
         * Create grouping rule that creates groups for different values of value
         * field with name [key]
         *
         * @param key the meta field whose string value selects the group
         * @param defaultTagValue group used when the field is absent
         * @return a [GroupRule] grouping by that meta value
         */
        @OptIn(DFInternal::class)
        public fun byMetaValue(
            key: String,
            defaultTagValue: String,
        ): GroupRule = object : GroupRule {

            override fun <T : Any> gather(
                set: DataSet<T>,
            ): Map<String, DataSet<T>> {
                val map = HashMap<String, DataSet<T>>()

                if (set is DataSource) {
                    // Live source: place current items and subscribe to future updates.
                    set.forEach { data ->
                        val tagValue: String = data.meta[key]?.string ?: defaultTagValue
                        (map.getOrPut(tagValue) { DataTreeBuilder(set.dataType, set.coroutineContext) } as DataTreeBuilder<T>)
                            .data(data.name, data.data)

                        // NOTE(review): this collector is launched once per existing item, so N items
                        // spawn N identical subscriptions — verify this duplication is intended.
                        set.launch {
                            set.updates.collect { name ->
                                val dataUpdate = set[name]

                                val updateTagValue = dataUpdate?.meta?.get(key)?.string ?: defaultTagValue
                                // NOTE(review): when the target group already exists, the updated item
                                // is NOT re-emitted into it — only a newly created group receives the
                                // update. Confirm whether existing groups should be updated too.
                                map.getOrPut(updateTagValue) {
                                    DataSource(set.dataType, this) {
                                        data(name, dataUpdate)
                                    }
                                }
                            }
                        }
                    }
                } else {
                    // Static source: a one-shot partition into static trees.
                    set.forEach { data ->
                        val tagValue: String = data.meta[key]?.string ?: defaultTagValue
                        (map.getOrPut(tagValue) { StaticDataTree(set.dataType) } as StaticDataTree<T>)
                            .data(data.name, data.data)
                    }
                }


                return map
            }
        }
    }
}
|
@ -0,0 +1,35 @@
|
|||||||
|
package space.kscience.dataforge.data
|
||||||
|
|
||||||
|
import space.kscience.dataforge.meta.isEmpty
|
||||||
|
import space.kscience.dataforge.misc.Named
|
||||||
|
import space.kscience.dataforge.names.Name
|
||||||
|
|
||||||
|
/** [Data] paired with its [Name] inside a data set. */
public interface NamedData<out T : Any> : Named, Data<T> {
    override val name: Name
    public val data: Data<T>
}

/** Destructuring support: `val (name, data) = namedData`. */
public operator fun NamedData<*>.component1(): Name = name
public operator fun <T : Any> NamedData<T>.component2(): Data<T> = data

private class NamedDataImpl<out T : Any>(
    override val name: Name,
    override val data: Data<T>,
) : Data<T> by data, NamedData<T> {
    override fun toString(): String = buildString {
        append("NamedData(name=\"$name\"")
        if (data is StaticData) {
            append(", value=${data.value}")
        }
        if (!data.meta.isEmpty()) {
            append(", meta=${data.meta}")
        }
        append(")")
    }
}

/** Attach [name] to this data, unwrapping an existing [NamedData] to avoid double wrapping. */
public fun <T : Any> Data<T>.named(name: Name): NamedData<T> =
    if (this is NamedData) NamedDataImpl(name, this.data) else NamedDataImpl(name, this)
|
@ -0,0 +1,82 @@
|
|||||||
|
package space.kscience.dataforge.data
|
||||||
|
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.misc.DFExperimental
|
||||||
|
import space.kscience.dataforge.names.*
|
||||||
|
import kotlin.reflect.KType
|
||||||
|
import kotlin.reflect.typeOf
|
||||||
|
|
||||||
|
/**
 * A static [DataTree] that is filled through [DataSetBuilder] calls and emits no updates.
 */
@PublishedApi
internal class StaticDataTree<T : Any>(
    override val dataType: KType,
) : DataSetBuilder<T>, DataTree<T> {

    private val _items: MutableMap<NameToken, DataTreeItem<T>> = HashMap()

    // Service items (tokens starting with '@') are hidden from the public item map.
    override val items: Map<NameToken, DataTreeItem<T>>
        get() = _items.filter { !it.key.body.startsWith("@") }

    override fun remove(name: Name) {
        when (name.length) {
            0 -> error("Can't remove root tree node")
            1 -> _items.remove(name.firstOrNull()!!)
            else -> (_items[name.firstOrNull()!!].tree as? StaticDataTree<T>)?.remove(name.cutFirst())
        }
    }

    private fun getOrCreateNode(name: Name): StaticDataTree<T> = when (name.length) {
        0 -> this
        1 -> {
            val itemName = name.firstOrNull()!!
            (_items[itemName].tree as? StaticDataTree<T>) ?: StaticDataTree<T>(dataType).also { created ->
                _items[itemName] = DataTreeItem.Node(created)
            }
        }
        else -> getOrCreateNode(name.cutLast()).getOrCreateNode(name.lastOrNull()!!.asName())
    }

    private fun set(name: Name, item: DataTreeItem<T>?) {
        if (name.isEmpty()) error("Can't set top level tree node")
        if (item == null) {
            remove(name)
        } else {
            getOrCreateNode(name.cutLast())._items[name.lastOrNull()!!] = item
        }
    }

    override fun data(name: Name, data: Data<T>?) {
        set(name, data?.let { DataTreeItem.Leaf(it) })
    }

    override fun node(name: Name, dataSet: DataSet<T>) {
        if (dataSet is StaticDataTree) {
            // Fast path: attach the whole subtree by reference.
            set(name, DataTreeItem.Node(dataSet))
        } else {
            // Generic path: copy the snapshot item by item.
            for (item in dataSet) {
                data(name + item.name, item.data)
            }
        }
    }

    override fun meta(name: Name, meta: Meta) {
        val item = getItem(name)
        if (item is DataTreeItem.Leaf) TODO("Can't change meta of existing leaf item.")
        data(name + DataTree.META_ITEM_NAME_TOKEN, Data.empty(meta))
    }
}
|
||||||
|
|
||||||
|
/**
 * Build a static [DataTree] of an explicit [dataType] using [block].
 */
@Suppress("FunctionName")
public inline fun <T : Any> DataTree(
    dataType: KType,
    block: DataSetBuilder<T>.() -> Unit,
): DataTree<T> = StaticDataTree<T>(dataType).apply { block() }

/**
 * Build a static [DataTree] with the type inferred from [T].
 */
@Suppress("FunctionName")
public inline fun <reified T : Any> DataTree(
    noinline block: DataSetBuilder<T>.() -> Unit,
): DataTree<T> = DataTree(typeOf<T>(), block)

/** Snapshot this data set into a static [DataTree]. */
@OptIn(DFExperimental::class)
public fun <T : Any> DataSet<T>.seal(): DataTree<T> = DataTree(dataType) {
    populateFrom(this@seal)
}
|
@ -0,0 +1,105 @@
|
|||||||
|
package space.kscience.dataforge.data
|
||||||
|
|
||||||
|
import kotlinx.coroutines.flow.Flow
|
||||||
|
import kotlinx.coroutines.flow.filter
|
||||||
|
import kotlinx.coroutines.flow.map
|
||||||
|
import kotlinx.coroutines.flow.mapNotNull
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.misc.DFExperimental
|
||||||
|
import space.kscience.dataforge.names.*
|
||||||
|
import kotlin.coroutines.CoroutineContext
|
||||||
|
import kotlin.coroutines.EmptyCoroutineContext
|
||||||
|
import kotlin.reflect.KType
|
||||||
|
|
||||||
|
|
||||||
|
/**
 * A stateless filtered [DataSet]
 */
public fun <T : Any> DataSet<T>.filter(
    predicate: (Name, Meta) -> Boolean,
): DataSource<T> = object : DataSource<T> {

    override val dataType: KType get() = this@filter.dataType

    // Reuse the upstream context when the source is live; otherwise no context is needed.
    override val coroutineContext: CoroutineContext
        get() = (this@filter as? DataSource)?.coroutineContext ?: EmptyCoroutineContext

    override val meta: Meta get() = this@filter.meta

    override fun iterator(): Iterator<NamedData<T>> = iterator {
        this@filter.forEach { item ->
            if (predicate(item.name, item.meta)) yield(item)
        }
    }

    override fun get(name: Name): Data<T>? = this@filter.get(name)?.takeIf {
        predicate(name, it.meta)
    }

    override val updates: Flow<Name> = this@filter.updates.filter flowFilter@{ name ->
        val theData = this@filter[name] ?: return@flowFilter false
        predicate(name, theData.meta)
    }
}
|
||||||
|
|
||||||
|
/**
 * Generate a wrapper data set with a given name prefix appended to all names
 */
public fun <T : Any> DataSet<T>.withNamePrefix(prefix: Name): DataSet<T> = if (prefix.isEmpty()) {
    this
} else object : DataSource<T> {

    override val dataType: KType get() = this@withNamePrefix.dataType

    // Reuse the upstream context when the source is live; otherwise no context is needed.
    override val coroutineContext: CoroutineContext
        get() = (this@withNamePrefix as? DataSource)?.coroutineContext ?: EmptyCoroutineContext

    override val meta: Meta get() = this@withNamePrefix.meta


    override fun iterator(): Iterator<NamedData<T>> = iterator {
        for (d in this@withNamePrefix) {
            yield(d.data.named(prefix + d.name))
        }
    }

    // FIX: the original stripped `name` from itself (`name.removeFirstOrNull(name)`), which always
    // produced the empty name and resolved every lookup against the upstream root. The [prefix]
    // must be stripped instead, mirroring `iterator`, which prepends it.
    override fun get(name: Name): Data<T>? =
        name.removeFirstOrNull(prefix)?.let { this@withNamePrefix.get(it) }

    override val updates: Flow<Name> get() = this@withNamePrefix.updates.map { prefix + it }
}
|
||||||
|
|
||||||
|
/**
 * Get a subset of data starting with a given [branchName]
 */
public fun <T : Any> DataSet<T>.branch(branchName: Name): DataSet<T> = if (branchName.isEmpty()) {
    this
} else object : DataSource<T> {
    override val dataType: KType get() = this@branch.dataType

    // Reuse the upstream context when the source is live; otherwise no context is needed.
    override val coroutineContext: CoroutineContext
        get() = (this@branch as? DataSource)?.coroutineContext ?: EmptyCoroutineContext

    override val meta: Meta get() = this@branch.meta

    override fun iterator(): Iterator<NamedData<T>> = iterator {
        this@branch.forEach { item ->
            // Only items under the branch are exposed, re-rooted at the branch.
            item.name.removeFirstOrNull(branchName)?.let { strippedName ->
                yield(item.data.named(strippedName))
            }
        }
    }

    override fun get(name: Name): Data<T>? = this@branch.get(branchName + name)

    override val updates: Flow<Name> get() = this@branch.updates.mapNotNull { it.removeFirstOrNull(branchName) }
}

/** [branch] overload that parses [branchName] from a string. */
public fun <T : Any> DataSet<T>.branch(branchName: String): DataSet<T> = this@branch.branch(branchName.parseAsName())

/** The data item attached to the root (empty name) of this set, if any. */
@DFExperimental
public suspend fun <T : Any> DataSet<T>.rootData(): Data<T>? = get(Name.EMPTY)
|
||||||
|
|
@ -0,0 +1,221 @@
|
|||||||
|
package space.kscience.dataforge.data
|
||||||
|
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.meta.MutableMeta
|
||||||
|
import space.kscience.dataforge.meta.seal
|
||||||
|
import space.kscience.dataforge.meta.toMutableMeta
|
||||||
|
import space.kscience.dataforge.misc.DFInternal
|
||||||
|
import space.kscience.dataforge.names.Name
|
||||||
|
import kotlin.coroutines.CoroutineContext
|
||||||
|
import kotlin.coroutines.EmptyCoroutineContext
|
||||||
|
import kotlin.reflect.KType
|
||||||
|
import kotlin.reflect.typeOf
|
||||||
|
|
||||||
|
public data class ValueWithMeta<T>(val meta: Meta, val value: T)
|
||||||
|
|
||||||
|
public suspend fun <T : Any> Data<T>.awaitWithMeta(): ValueWithMeta<T> = ValueWithMeta(meta, await())
|
||||||
|
|
||||||
|
public data class NamedValueWithMeta<T>(val name: Name, val meta: Meta, val value: T)
|
||||||
|
|
||||||
|
public suspend fun <T : Any> NamedData<T>.awaitWithMeta(): NamedValueWithMeta<T> =
|
||||||
|
NamedValueWithMeta(name, meta, await())
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Lazily transform this data to another data. By convention [block] should not use external data (be pure).
|
||||||
|
* @param coroutineContext additional [CoroutineContext] elements used for data computation.
|
||||||
|
* @param meta for the resulting data. By default equals input data.
|
||||||
|
* @param block the transformation itself
|
||||||
|
*/
|
||||||
|
public inline fun <T : Any, reified R : Any> Data<T>.map(
|
||||||
|
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||||
|
meta: Meta = this.meta,
|
||||||
|
crossinline block: suspend (T) -> R,
|
||||||
|
): Data<R> = Data(meta, coroutineContext, listOf(this)) {
|
||||||
|
block(await())
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Combine this data with the other data using [block]. See [Data::map] for other details
|
||||||
|
*/
|
||||||
|
public inline fun <T1 : Any, T2 : Any, reified R : Any> Data<T1>.combine(
|
||||||
|
other: Data<T2>,
|
||||||
|
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||||
|
meta: Meta = this.meta,
|
||||||
|
crossinline block: suspend (left: T1, right: T2) -> R,
|
||||||
|
): Data<R> = Data(meta, coroutineContext, listOf(this, other)) {
|
||||||
|
block(await(), other.await())
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
//data collection operations
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Lazily reduce a collection of [Data] to a single data.
|
||||||
|
*/
|
||||||
|
public inline fun <T : Any, reified R : Any> Collection<Data<T>>.reduceToData(
|
||||||
|
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||||
|
meta: Meta = Meta.EMPTY,
|
||||||
|
crossinline block: suspend (List<ValueWithMeta<T>>) -> R,
|
||||||
|
): Data<R> = Data(
|
||||||
|
meta,
|
||||||
|
coroutineContext,
|
||||||
|
this
|
||||||
|
) {
|
||||||
|
block(map { it.awaitWithMeta() })
|
||||||
|
}
|
||||||
|
|
||||||
|
@DFInternal
|
||||||
|
public fun <K, T : Any, R : Any> Map<K, Data<T>>.reduceToData(
|
||||||
|
outputType: KType,
|
||||||
|
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||||
|
meta: Meta = Meta.EMPTY,
|
||||||
|
block: suspend (Map<K, ValueWithMeta<T>>) -> R,
|
||||||
|
): Data<R> = Data(
|
||||||
|
outputType,
|
||||||
|
meta,
|
||||||
|
coroutineContext,
|
||||||
|
this.values
|
||||||
|
) {
|
||||||
|
block(mapValues { it.value.awaitWithMeta() })
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Lazily reduce a [Map] of [Data] with any static key.
|
||||||
|
* @param K type of the map key
|
||||||
|
* @param T type of the input goal
|
||||||
|
* @param R type of the result goal
|
||||||
|
*/
|
||||||
|
public inline fun <K, T : Any, reified R : Any> Map<K, Data<T>>.reduceToData(
|
||||||
|
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||||
|
meta: Meta = Meta.EMPTY,
|
||||||
|
crossinline block: suspend (Map<K, ValueWithMeta<T>>) -> R,
|
||||||
|
): Data<R> = Data(
|
||||||
|
meta,
|
||||||
|
coroutineContext,
|
||||||
|
this.values
|
||||||
|
) {
|
||||||
|
block(mapValues { it.value.awaitWithMeta() })
|
||||||
|
}
|
||||||
|
|
||||||
|
//Iterable operations
|
||||||
|
|
||||||
|
@DFInternal
|
||||||
|
public inline fun <T : Any, R : Any> Iterable<Data<T>>.reduceToData(
|
||||||
|
outputType: KType,
|
||||||
|
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||||
|
meta: Meta = Meta.EMPTY,
|
||||||
|
crossinline transformation: suspend (Collection<ValueWithMeta<T>>) -> R,
|
||||||
|
): Data<R> = Data(
|
||||||
|
outputType,
|
||||||
|
meta,
|
||||||
|
coroutineContext,
|
||||||
|
toList()
|
||||||
|
) {
|
||||||
|
transformation(map { it.awaitWithMeta() })
|
||||||
|
}
|
||||||
|
|
||||||
|
@OptIn(DFInternal::class)
|
||||||
|
public inline fun <T : Any, reified R : Any> Iterable<Data<T>>.reduceToData(
|
||||||
|
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||||
|
meta: Meta = Meta.EMPTY,
|
||||||
|
crossinline transformation: suspend (Collection<ValueWithMeta<T>>) -> R,
|
||||||
|
): Data<R> = reduceToData(typeOf<R>(), coroutineContext, meta) {
|
||||||
|
transformation(it)
|
||||||
|
}
|
||||||
|
|
||||||
|
public inline fun <T : Any, reified R : Any> Iterable<Data<T>>.foldToData(
|
||||||
|
initial: R,
|
||||||
|
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||||
|
meta: Meta = Meta.EMPTY,
|
||||||
|
crossinline block: suspend (result: R, data: ValueWithMeta<T>) -> R,
|
||||||
|
): Data<R> = reduceToData(
|
||||||
|
coroutineContext, meta
|
||||||
|
) {
|
||||||
|
it.fold(initial) { acc, t -> block(acc, t) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Transform an [Iterable] of [NamedData] to a single [Data].
|
||||||
|
*/
|
||||||
|
@DFInternal
|
||||||
|
public inline fun <T : Any, R : Any> Iterable<NamedData<T>>.reduceNamedToData(
|
||||||
|
outputType: KType,
|
||||||
|
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||||
|
meta: Meta = Meta.EMPTY,
|
||||||
|
crossinline transformation: suspend (Collection<NamedValueWithMeta<T>>) -> R,
|
||||||
|
): Data<R> = Data(
|
||||||
|
outputType,
|
||||||
|
meta,
|
||||||
|
coroutineContext,
|
||||||
|
toList()
|
||||||
|
) {
|
||||||
|
transformation(map { it.awaitWithMeta() })
|
||||||
|
}
|
||||||
|
|
||||||
|
@OptIn(DFInternal::class)
|
||||||
|
public inline fun <T : Any, reified R : Any> Iterable<NamedData<T>>.reduceNamedToData(
|
||||||
|
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||||
|
meta: Meta = Meta.EMPTY,
|
||||||
|
crossinline transformation: suspend (Collection<NamedValueWithMeta<T>>) -> R,
|
||||||
|
): Data<R> = reduceNamedToData(typeOf<R>(), coroutineContext, meta) {
|
||||||
|
transformation(it)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fold a [Iterable] of named data into a single [Data]
|
||||||
|
*/
|
||||||
|
public inline fun <T : Any, reified R : Any> Iterable<NamedData<T>>.foldNamedToData(
|
||||||
|
initial: R,
|
||||||
|
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||||
|
meta: Meta = Meta.EMPTY,
|
||||||
|
crossinline block: suspend (result: R, data: NamedValueWithMeta<T>) -> R,
|
||||||
|
): Data<R> = reduceNamedToData(
|
||||||
|
coroutineContext, meta
|
||||||
|
) {
|
||||||
|
it.fold(initial) { acc, t -> block(acc, t) }
|
||||||
|
}
|
||||||
|
|
||||||
|
//DataSet operations
|
||||||
|
|
||||||
|
@DFInternal
|
||||||
|
public suspend fun <T : Any, R : Any> DataSet<T>.map(
|
||||||
|
outputType: KType,
|
||||||
|
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||||
|
metaTransform: MutableMeta.() -> Unit = {},
|
||||||
|
block: suspend (NamedValueWithMeta<T>) -> R,
|
||||||
|
): DataTree<R> = DataTree<R>(outputType) {
|
||||||
|
forEach {
|
||||||
|
val newMeta = it.meta.toMutableMeta().apply(metaTransform).seal()
|
||||||
|
val d = Data(outputType, newMeta, coroutineContext, listOf(it)) {
|
||||||
|
block(it.awaitWithMeta())
|
||||||
|
}
|
||||||
|
data(it.name, d)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@OptIn(DFInternal::class)
|
||||||
|
public suspend inline fun <T : Any, reified R : Any> DataSet<T>.map(
|
||||||
|
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||||
|
noinline metaTransform: MutableMeta.() -> Unit = {},
|
||||||
|
noinline block: suspend (NamedValueWithMeta<T>) -> R,
|
||||||
|
): DataTree<R> = map(typeOf<R>(), coroutineContext, metaTransform, block)
|
||||||
|
|
||||||
|
public inline fun <T : Any> DataSet<T>.forEach(block: (NamedData<T>) -> Unit) {
|
||||||
|
for (d in this) {
|
||||||
|
block(d)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public inline fun <T : Any, reified R : Any> DataSet<T>.reduceToData(
|
||||||
|
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||||
|
meta: Meta = Meta.EMPTY,
|
||||||
|
crossinline transformation: suspend (Iterable<NamedValueWithMeta<T>>) -> R,
|
||||||
|
): Data<R> = asIterable().reduceNamedToData(coroutineContext, meta, transformation)
|
||||||
|
|
||||||
|
public inline fun <T : Any, reified R : Any> DataSet<T>.foldToData(
|
||||||
|
initial: R,
|
||||||
|
coroutineContext: CoroutineContext = EmptyCoroutineContext,
|
||||||
|
meta: Meta = Meta.EMPTY,
|
||||||
|
crossinline block: suspend (result: R, data: NamedValueWithMeta<T>) -> R,
|
||||||
|
): Data<R> = asIterable().foldNamedToData(initial, coroutineContext, meta, block)
|
@ -1,32 +0,0 @@
|
|||||||
package hep.dataforge.data
|
|
||||||
|
|
||||||
import kotlin.test.Test
|
|
||||||
import kotlin.test.assertTrue
|
|
||||||
|
|
||||||
|
|
||||||
internal class DataTreeBuilderTest{
|
|
||||||
@Test
|
|
||||||
fun testDataUpdate(){
|
|
||||||
val updateData = DataNode<Any>{
|
|
||||||
"update" put {
|
|
||||||
"a" put Data.static("a")
|
|
||||||
"b" put Data.static("b")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
val node = DataNode<Any>{
|
|
||||||
node("primary"){
|
|
||||||
static("a","a")
|
|
||||||
static("b","b")
|
|
||||||
}
|
|
||||||
static("root","root")
|
|
||||||
update(updateData)
|
|
||||||
}
|
|
||||||
|
|
||||||
println(node.toMeta())
|
|
||||||
|
|
||||||
assertTrue { node["update.a"] != null }
|
|
||||||
assertTrue { node["primary.a"] != null }
|
|
||||||
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,14 +0,0 @@
|
|||||||
package hep.dataforge.data
|
|
||||||
|
|
||||||
import kotlin.reflect.KClass
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Check that node is compatible with given type meaning that each element could be cast to the type
|
|
||||||
*/
|
|
||||||
internal actual fun <R : Any> DataNode<*>.canCast(type: KClass<out R>): Boolean {
|
|
||||||
return this.type == type
|
|
||||||
}
|
|
||||||
|
|
||||||
internal actual fun <R : Any> Data<*>.canCast(type: KClass<out R>): Boolean {
|
|
||||||
return this.type == type
|
|
||||||
}
|
|
@ -1,27 +0,0 @@
|
|||||||
package hep.dataforge.data
|
|
||||||
|
|
||||||
import hep.dataforge.meta.Meta
|
|
||||||
import hep.dataforge.names.NameToken
|
|
||||||
import kotlin.reflect.KClass
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
|
||||||
* A zero-copy data node wrapper that returns only children with appropriate type.
|
|
||||||
*/
|
|
||||||
public class TypeFilteredDataNode<out T : Any>(public val origin: DataNode<*>, override val type: KClass<out T>) : DataNode<T> {
|
|
||||||
override val meta: Meta get() = origin.meta
|
|
||||||
override val items: Map<NameToken, DataItem<T>> by lazy {
|
|
||||||
origin.items.mapNotNull { (key, item) ->
|
|
||||||
when (item) {
|
|
||||||
is DataItem.Leaf -> {
|
|
||||||
(item.data.filterIsInstance(type))?.let {
|
|
||||||
key to DataItem.Leaf(it)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
is DataItem.Node -> {
|
|
||||||
key to DataItem.Node(item.node.filterIsInstance(type))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}.associate { it }
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,48 +0,0 @@
|
|||||||
package hep.dataforge.data
|
|
||||||
|
|
||||||
import kotlinx.coroutines.runBlocking
|
|
||||||
import kotlin.reflect.KClass
|
|
||||||
import kotlin.reflect.full.isSubclassOf
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Block the thread and get data content
|
|
||||||
*/
|
|
||||||
public fun <T : Any> Data<T>.get(): T = runBlocking { await() }
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Check that node is compatible with given type meaning that each element could be cast to the type
|
|
||||||
*/
|
|
||||||
internal actual fun <R : Any> DataNode<*>.canCast(type: KClass<out R>): Boolean =
|
|
||||||
type.isSubclassOf(this.type)
|
|
||||||
|
|
||||||
internal actual fun <R : Any> Data<*>.canCast(type: KClass<out R>): Boolean =
|
|
||||||
this.type.isSubclassOf(type)
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Cast the node to given type if the cast is possible or return null
|
|
||||||
*/
|
|
||||||
public fun <R : Any> Data<*>.filterIsInstance(type: KClass<out R>): Data<R>? =
|
|
||||||
if (canCast(type)) cast(type) else null
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Filter a node by data and node type. Resulting node and its subnodes is guaranteed to have border type [type],
|
|
||||||
* but could contain empty nodes
|
|
||||||
*/
|
|
||||||
public fun <R : Any> DataNode<*>.filterIsInstance(type: KClass<out R>): DataNode<R> {
|
|
||||||
return when {
|
|
||||||
canCast(type) -> cast(type)
|
|
||||||
this is TypeFilteredDataNode -> origin.filterIsInstance(type)
|
|
||||||
else -> TypeFilteredDataNode(this, type)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Filter all elements of given data item that could be cast to given type. If no elements are available, return null.
|
|
||||||
*/
|
|
||||||
public fun <R : Any> DataItem<*>?.filterIsInstance(type: KClass<out R>): DataItem<R>? = when (this) {
|
|
||||||
null -> null
|
|
||||||
is DataItem.Node -> DataItem.Node(this.node.filterIsInstance(type))
|
|
||||||
is DataItem.Leaf -> this.data.filterIsInstance(type)?.let { DataItem.Leaf(it) }
|
|
||||||
}
|
|
||||||
|
|
||||||
public inline fun <reified R : Any> DataItem<*>?.filterIsInstance(): DataItem<R>? = this@filterIsInstance.filterIsInstance(R::class)
|
|
@ -0,0 +1,2 @@
|
|||||||
|
package space.kscience.dataforge.data
|
||||||
|
|
@ -0,0 +1,85 @@
|
|||||||
|
package space.kscience.dataforge.data
|
||||||
|
|
||||||
|
import kotlinx.coroutines.flow.Flow
|
||||||
|
import kotlinx.coroutines.flow.filter
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.misc.DFExperimental
|
||||||
|
import space.kscience.dataforge.names.Name
|
||||||
|
import kotlin.coroutines.CoroutineContext
|
||||||
|
import kotlin.coroutines.EmptyCoroutineContext
|
||||||
|
import kotlin.reflect.KType
|
||||||
|
import kotlin.reflect.full.isSubtypeOf
|
||||||
|
import kotlin.reflect.typeOf
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Cast the node to given type if the cast is possible or return null
|
||||||
|
*/
|
||||||
|
@Suppress("UNCHECKED_CAST")
|
||||||
|
private fun <R : Any> Data<*>.castOrNull(type: KType): Data<R>? =
|
||||||
|
if (!this.type.isSubtypeOf(type)) {
|
||||||
|
null
|
||||||
|
} else {
|
||||||
|
object : Data<R> by (this as Data<R>) {
|
||||||
|
override val type: KType = type
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Select all data matching given type and filters. Does not modify paths
|
||||||
|
*
|
||||||
|
* @param predicate addition filtering condition based on item name and meta. By default, accepts all
|
||||||
|
*/
|
||||||
|
@OptIn(DFExperimental::class)
|
||||||
|
public fun <R : Any> DataSet<*>.filterByType(
|
||||||
|
type: KType,
|
||||||
|
predicate: (name: Name, meta: Meta) -> Boolean = { _, _ -> true },
|
||||||
|
): DataSource<R> = object : DataSource<R> {
|
||||||
|
override val dataType = type
|
||||||
|
|
||||||
|
override val coroutineContext: CoroutineContext
|
||||||
|
get() = (this@filterByType as? DataSource)?.coroutineContext ?: EmptyCoroutineContext
|
||||||
|
|
||||||
|
override val meta: Meta get() = this@filterByType.meta
|
||||||
|
|
||||||
|
private fun checkDatum(name: Name, datum: Data<*>): Boolean = datum.type.isSubtypeOf(type)
|
||||||
|
&& predicate(name, datum.meta)
|
||||||
|
|
||||||
|
override fun iterator(): Iterator<NamedData<R>> = iterator {
|
||||||
|
for(d in this@filterByType){
|
||||||
|
if(checkDatum(d.name,d.data)){
|
||||||
|
@Suppress("UNCHECKED_CAST")
|
||||||
|
yield(d as NamedData<R>)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
override fun get(name: Name): Data<R>? = this@filterByType[name]?.let { datum ->
|
||||||
|
if (checkDatum(name, datum)) datum.castOrNull(type) else null
|
||||||
|
}
|
||||||
|
|
||||||
|
override val updates: Flow<Name> = this@filterByType.updates.filter { name ->
|
||||||
|
get(name)?.let { datum ->
|
||||||
|
checkDatum(name, datum)
|
||||||
|
} ?: false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Select a single datum of the appropriate type
|
||||||
|
*/
|
||||||
|
public inline fun <reified R : Any> DataSet<*>.filterByType(
|
||||||
|
noinline predicate: (name: Name, meta: Meta) -> Boolean = { _, _ -> true },
|
||||||
|
): DataSet<R> = filterByType(typeOf<R>(), predicate)
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Select a single datum if it is present and of given [type]
|
||||||
|
*/
|
||||||
|
public fun <R : Any> DataSet<*>.getByType(type: KType, name: Name): NamedData<R>? =
|
||||||
|
get(name)?.castOrNull<R>(type)?.named(name)
|
||||||
|
|
||||||
|
public inline fun <reified R : Any> DataSet<*>.getByType(name: Name): NamedData<R>? =
|
||||||
|
this@getByType.getByType(typeOf<R>(), name)
|
||||||
|
|
||||||
|
public inline fun <reified R : Any> DataSet<*>.getByType(name: String): NamedData<R>? =
|
||||||
|
this@getByType.getByType(typeOf<R>(), Name.parse(name))
|
@ -0,0 +1,40 @@
|
|||||||
|
package space.kscience.dataforge.data
|
||||||
|
|
||||||
|
import kotlinx.coroutines.CoroutineScope
|
||||||
|
import kotlinx.coroutines.Job
|
||||||
|
import kotlinx.coroutines.launch
|
||||||
|
import space.kscience.dataforge.names.Name
|
||||||
|
import space.kscience.dataforge.names.plus
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Append data to node
|
||||||
|
*/
|
||||||
|
context(DataSetBuilder<T>) public infix fun <T : Any> String.put(data: Data<T>): Unit =
|
||||||
|
data(Name.parse(this), data)
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Append node
|
||||||
|
*/
|
||||||
|
context(DataSetBuilder<T>) public infix fun <T : Any> String.put(dataSet: DataSet<T>): Unit =
|
||||||
|
node(Name.parse(this), dataSet)
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build and append node
|
||||||
|
*/
|
||||||
|
context(DataSetBuilder<T>) public infix fun <T : Any> String.put(
|
||||||
|
block: DataSetBuilder<T>.() -> Unit,
|
||||||
|
): Unit = node(Name.parse(this), block)
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Copy given data set and mirror its changes to this [DataTreeBuilder] in [this@setAndObserve]. Returns an update [Job]
|
||||||
|
*/
|
||||||
|
context(DataSetBuilder<T>) public fun <T : Any> CoroutineScope.setAndWatch(
|
||||||
|
name: Name,
|
||||||
|
dataSet: DataSet<T>,
|
||||||
|
): Job = launch {
|
||||||
|
node(name, dataSet)
|
||||||
|
dataSet.updates.collect { nameInBranch ->
|
||||||
|
data(name + nameInBranch, dataSet.get(nameInBranch))
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,50 @@
|
|||||||
|
package space.kscience.dataforge.data
|
||||||
|
|
||||||
|
import kotlinx.coroutines.ExperimentalCoroutinesApi
|
||||||
|
import kotlinx.coroutines.delay
|
||||||
|
import kotlinx.coroutines.test.runTest
|
||||||
|
import org.junit.jupiter.api.Test
|
||||||
|
import space.kscience.dataforge.actions.Action
|
||||||
|
import space.kscience.dataforge.actions.invoke
|
||||||
|
import space.kscience.dataforge.actions.map
|
||||||
|
import space.kscience.dataforge.misc.DFExperimental
|
||||||
|
import kotlin.test.assertEquals
|
||||||
|
|
||||||
|
@OptIn(DFExperimental::class, ExperimentalCoroutinesApi::class)
|
||||||
|
internal class ActionsTest {
|
||||||
|
@Test
|
||||||
|
fun testStaticMapAction() = runTest {
|
||||||
|
val data: DataTree<Int> = DataTree {
|
||||||
|
repeat(10) {
|
||||||
|
static(it.toString(), it)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
val plusOne = Action.map<Int, Int> {
|
||||||
|
result { it + 1 }
|
||||||
|
}
|
||||||
|
val result = plusOne(data)
|
||||||
|
assertEquals(2, result["1"]?.await())
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
fun testDynamicMapAction() = runTest {
|
||||||
|
val data: DataSourceBuilder<Int> = DataSource()
|
||||||
|
|
||||||
|
val plusOne = Action.map<Int, Int> {
|
||||||
|
result { it + 1 }
|
||||||
|
}
|
||||||
|
|
||||||
|
val result = plusOne(data)
|
||||||
|
|
||||||
|
repeat(10) {
|
||||||
|
data.static(it.toString(), it)
|
||||||
|
}
|
||||||
|
|
||||||
|
delay(20)
|
||||||
|
|
||||||
|
assertEquals(2, result["1"]?.await())
|
||||||
|
data.close()
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
@ -0,0 +1,91 @@
|
|||||||
|
package space.kscience.dataforge.data
|
||||||
|
|
||||||
|
import kotlinx.coroutines.*
|
||||||
|
import space.kscience.dataforge.misc.DFExperimental
|
||||||
|
import space.kscience.dataforge.names.asName
|
||||||
|
import kotlin.test.Test
|
||||||
|
import kotlin.test.assertEquals
|
||||||
|
|
||||||
|
|
||||||
|
internal class DataTreeBuilderTest {
|
||||||
|
@Test
|
||||||
|
fun testTreeBuild() = runBlocking {
|
||||||
|
val node = DataTree<Any> {
|
||||||
|
"primary" put {
|
||||||
|
static("a", "a")
|
||||||
|
static("b", "b")
|
||||||
|
}
|
||||||
|
static("c.d", "c.d")
|
||||||
|
static("c.f", "c.f")
|
||||||
|
}
|
||||||
|
runBlocking {
|
||||||
|
assertEquals("a", node["primary.a"]?.await())
|
||||||
|
assertEquals("b", node["primary.b"]?.await())
|
||||||
|
assertEquals("c.d", node["c.d"]?.await())
|
||||||
|
assertEquals("c.f", node["c.f"]?.await())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@OptIn(DFExperimental::class)
|
||||||
|
@Test
|
||||||
|
fun testDataUpdate() = runBlocking {
|
||||||
|
val updateData: DataTree<Any> = DataTree {
|
||||||
|
"update" put {
|
||||||
|
"a" put Data.static("a")
|
||||||
|
"b" put Data.static("b")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
val node = DataTree<Any> {
|
||||||
|
"primary" put {
|
||||||
|
static("a", "a")
|
||||||
|
static("b", "b")
|
||||||
|
}
|
||||||
|
static("root", "root")
|
||||||
|
populateFrom(updateData)
|
||||||
|
}
|
||||||
|
|
||||||
|
runBlocking {
|
||||||
|
assertEquals("a", node["update.a"]?.await())
|
||||||
|
assertEquals("a", node["primary.a"]?.await())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
fun testDynamicUpdates() = runBlocking {
|
||||||
|
try {
|
||||||
|
lateinit var updateJob: Job
|
||||||
|
supervisorScope {
|
||||||
|
val subNode = DataSource<Int> {
|
||||||
|
updateJob = launch {
|
||||||
|
repeat(10) {
|
||||||
|
delay(10)
|
||||||
|
static("value", it)
|
||||||
|
}
|
||||||
|
delay(10)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
launch {
|
||||||
|
subNode.updatesWithData.collect {
|
||||||
|
println(it)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
val rootNode = DataSource<Int> {
|
||||||
|
setAndWatch("sub".asName(), subNode)
|
||||||
|
}
|
||||||
|
|
||||||
|
launch {
|
||||||
|
rootNode.updatesWithData.collect {
|
||||||
|
println(it)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
updateJob.join()
|
||||||
|
assertEquals(9, rootNode["sub.value"]?.await())
|
||||||
|
cancel()
|
||||||
|
}
|
||||||
|
} catch (t: Throwable) {
|
||||||
|
if (t !is CancellationException) throw t
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
@ -1,14 +0,0 @@
|
|||||||
package hep.dataforge.data
|
|
||||||
|
|
||||||
import kotlin.reflect.KClass
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Check that node is compatible with given type meaning that each element could be cast to the type
|
|
||||||
*/
|
|
||||||
internal actual fun <R : Any> DataNode<*>.canCast(type: KClass<out R>): Boolean {
|
|
||||||
return this.type == type
|
|
||||||
}
|
|
||||||
|
|
||||||
internal actual fun <R : Any> Data<*>.canCast(type: KClass<out R>): Boolean {
|
|
||||||
return this.type == type
|
|
||||||
}
|
|
23
dataforge-io/README.md
Normal file
23
dataforge-io/README.md
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
# Module dataforge-io
|
||||||
|
|
||||||
|
IO module
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
## Artifact:
|
||||||
|
|
||||||
|
The Maven coordinates of this project are `space.kscience:dataforge-io:0.7.0`.
|
||||||
|
|
||||||
|
**Gradle Kotlin DSL:**
|
||||||
|
```kotlin
|
||||||
|
repositories {
|
||||||
|
maven("https://repo.kotlin.link")
|
||||||
|
//uncomment to access development builds
|
||||||
|
//maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
|
||||||
|
mavenCentral()
|
||||||
|
}
|
||||||
|
|
||||||
|
dependencies {
|
||||||
|
implementation("space.kscience:dataforge-io:0.7.0")
|
||||||
|
}
|
||||||
|
```
|
@ -7,7 +7,7 @@ public final class hep/dataforge/io/BinaryMetaFormat : hep/dataforge/io/MetaForm
|
|||||||
public fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Lhep/dataforge/io/MetaFormat;
|
public fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Lhep/dataforge/io/MetaFormat;
|
||||||
public synthetic fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Ljava/lang/Object;
|
public synthetic fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Ljava/lang/Object;
|
||||||
public fun readMeta (Lkotlinx/io/Input;Lhep/dataforge/meta/descriptors/NodeDescriptor;)Lhep/dataforge/meta/Meta;
|
public fun readMeta (Lkotlinx/io/Input;Lhep/dataforge/meta/descriptors/NodeDescriptor;)Lhep/dataforge/meta/Meta;
|
||||||
public final fun readMetaItem (Lkotlinx/io/Input;)Lhep/dataforge/meta/MetaItem;
|
public final fun readMetaItem (Lkotlinx/io/Input;)Lhep/dataforge/meta/TypedMetaItem;
|
||||||
public fun readObject (Lkotlinx/io/Input;)Lhep/dataforge/meta/Meta;
|
public fun readObject (Lkotlinx/io/Input;)Lhep/dataforge/meta/Meta;
|
||||||
public synthetic fun readObject (Lkotlinx/io/Input;)Ljava/lang/Object;
|
public synthetic fun readObject (Lkotlinx/io/Input;)Ljava/lang/Object;
|
||||||
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
public fun toMeta ()Lhep/dataforge/meta/Meta;
|
||||||
@ -77,6 +77,10 @@ public final class hep/dataforge/io/EnvelopeBuilder : hep/dataforge/io/Envelope
|
|||||||
public final fun setType (Ljava/lang/String;)V
|
public final fun setType (Ljava/lang/String;)V
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public final class hep/dataforge/io/EnvelopeBuilderKt {
|
||||||
|
public static final fun Envelope (Lkotlin/jvm/functions/Function1;)Lhep/dataforge/io/Envelope;
|
||||||
|
}
|
||||||
|
|
||||||
public abstract interface class hep/dataforge/io/EnvelopeFormat : hep/dataforge/io/IOFormat {
|
public abstract interface class hep/dataforge/io/EnvelopeFormat : hep/dataforge/io/IOFormat {
|
||||||
public abstract fun getDefaultMetaFormat ()Lhep/dataforge/io/MetaFormatFactory;
|
public abstract fun getDefaultMetaFormat ()Lhep/dataforge/io/MetaFormatFactory;
|
||||||
public abstract fun readObject (Lkotlinx/io/Input;)Lhep/dataforge/io/Envelope;
|
public abstract fun readObject (Lkotlinx/io/Input;)Lhep/dataforge/io/Envelope;
|
||||||
@ -215,8 +219,8 @@ public final class hep/dataforge/io/IOPlugin : hep/dataforge/context/AbstractPlu
|
|||||||
public final fun getIoFormatFactories ()Ljava/util/Collection;
|
public final fun getIoFormatFactories ()Ljava/util/Collection;
|
||||||
public final fun getMetaFormatFactories ()Ljava/util/Collection;
|
public final fun getMetaFormatFactories ()Ljava/util/Collection;
|
||||||
public fun getTag ()Lhep/dataforge/context/PluginTag;
|
public fun getTag ()Lhep/dataforge/context/PluginTag;
|
||||||
public final fun resolveEnvelopeFormat (Lhep/dataforge/meta/MetaItem;)Lhep/dataforge/io/EnvelopeFormat;
|
public final fun resolveEnvelopeFormat (Lhep/dataforge/meta/TypedMetaItem;)Lhep/dataforge/io/EnvelopeFormat;
|
||||||
public final fun resolveIOFormat (Lhep/dataforge/meta/MetaItem;Lkotlin/reflect/KClass;)Lhep/dataforge/io/IOFormat;
|
public final fun resolveIOFormat (Lhep/dataforge/meta/TypedMetaItem;Lkotlin/reflect/KClass;)Lhep/dataforge/io/IOFormat;
|
||||||
public final fun resolveMetaFormat (Ljava/lang/String;Lhep/dataforge/meta/Meta;)Lhep/dataforge/io/MetaFormat;
|
public final fun resolveMetaFormat (Ljava/lang/String;Lhep/dataforge/meta/Meta;)Lhep/dataforge/io/MetaFormat;
|
||||||
public final fun resolveMetaFormat (SLhep/dataforge/meta/Meta;)Lhep/dataforge/io/MetaFormat;
|
public final fun resolveMetaFormat (SLhep/dataforge/meta/Meta;)Lhep/dataforge/io/MetaFormat;
|
||||||
public static synthetic fun resolveMetaFormat$default (Lhep/dataforge/io/IOPlugin;Ljava/lang/String;Lhep/dataforge/meta/Meta;ILjava/lang/Object;)Lhep/dataforge/io/MetaFormat;
|
public static synthetic fun resolveMetaFormat$default (Lhep/dataforge/io/IOPlugin;Ljava/lang/String;Lhep/dataforge/meta/Meta;ILjava/lang/Object;)Lhep/dataforge/io/MetaFormat;
|
||||||
|
@ -1,25 +1,26 @@
|
|||||||
plugins {
|
plugins {
|
||||||
id("ru.mipt.npm.mpp")
|
id("space.kscience.gradle.mpp")
|
||||||
id("ru.mipt.npm.native")
|
|
||||||
}
|
}
|
||||||
|
|
||||||
description = "IO module"
|
description = "IO module"
|
||||||
|
|
||||||
|
val ioVersion = "0.2.1"
|
||||||
|
|
||||||
kscience {
|
kscience {
|
||||||
useSerialization(sourceSet = ru.mipt.npm.gradle.DependencySourceSet.TEST) {
|
jvm()
|
||||||
|
js()
|
||||||
|
native()
|
||||||
|
useSerialization()
|
||||||
|
useSerialization(sourceSet = space.kscience.gradle.DependencySourceSet.TEST) {
|
||||||
cbor()
|
cbor()
|
||||||
}
|
}
|
||||||
|
dependencies {
|
||||||
|
api(projects.dataforgeContext)
|
||||||
|
api("org.jetbrains.kotlinx:kotlinx-io-core:$ioVersion")
|
||||||
|
api("org.jetbrains.kotlinx:kotlinx-io-bytestring:$ioVersion")
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
val ioVersion by rootProject.extra("0.2.0-npm-dev-11")
|
readme{
|
||||||
|
maturity = space.kscience.gradle.Maturity.EXPERIMENTAL
|
||||||
kotlin {
|
|
||||||
sourceSets {
|
|
||||||
commonMain {
|
|
||||||
dependencies {
|
|
||||||
api(project(":dataforge-context"))
|
|
||||||
api("org.jetbrains.kotlinx:kotlinx-io:$ioVersion")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
23
dataforge-io/dataforge-io-yaml/README.md
Normal file
23
dataforge-io/dataforge-io-yaml/README.md
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
# Module dataforge-io-yaml
|
||||||
|
|
||||||
|
YAML meta IO
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
## Artifact:
|
||||||
|
|
||||||
|
The Maven coordinates of this project are `space.kscience:dataforge-io-yaml:0.7.0`.
|
||||||
|
|
||||||
|
**Gradle Kotlin DSL:**
|
||||||
|
```kotlin
|
||||||
|
repositories {
|
||||||
|
maven("https://repo.kotlin.link")
|
||||||
|
//uncomment to access development builds
|
||||||
|
//maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
|
||||||
|
mavenCentral()
|
||||||
|
}
|
||||||
|
|
||||||
|
dependencies {
|
||||||
|
implementation("space.kscience:dataforge-io-yaml:0.7.0")
|
||||||
|
}
|
||||||
|
```
|
@ -57,3 +57,10 @@ public final class hep/dataforge/io/yaml/YamlMetaFormat$Companion : hep/dataforg
|
|||||||
public synthetic fun writeObject (Lkotlinx/io/Output;Ljava/lang/Object;)V
|
public synthetic fun writeObject (Lkotlinx/io/Output;Ljava/lang/Object;)V
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public final class hep/dataforge/io/yaml/YamlMetaFormatKt {
|
||||||
|
public static final fun toMeta (Lnet/mamoe/yamlkt/YamlMap;)Lhep/dataforge/meta/Meta;
|
||||||
|
public static final fun toMetaItem (Lnet/mamoe/yamlkt/YamlElement;Lhep/dataforge/meta/descriptors/ItemDescriptor;)Lhep/dataforge/meta/TypedMetaItem;
|
||||||
|
public static synthetic fun toMetaItem$default (Lnet/mamoe/yamlkt/YamlElement;Lhep/dataforge/meta/descriptors/ItemDescriptor;ILjava/lang/Object;)Lhep/dataforge/meta/TypedMetaItem;
|
||||||
|
public static final fun toYaml (Lhep/dataforge/meta/Meta;)Lnet/mamoe/yamlkt/YamlMap;
|
||||||
|
}
|
||||||
|
|
||||||
|
@ -1,16 +1,24 @@
|
|||||||
plugins {
|
plugins {
|
||||||
id("ru.mipt.npm.jvm")
|
id("space.kscience.gradle.mpp")
|
||||||
}
|
}
|
||||||
|
|
||||||
description = "YAML meta IO"
|
description = "YAML meta IO"
|
||||||
|
|
||||||
kscience {
|
kscience {
|
||||||
useSerialization {
|
jvm()
|
||||||
yaml()
|
js()
|
||||||
|
native()
|
||||||
|
dependencies {
|
||||||
|
api(projects.dataforgeIo)
|
||||||
|
}
|
||||||
|
useSerialization{
|
||||||
|
yamlKt()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
dependencies {
|
readme{
|
||||||
api(project(":dataforge-io"))
|
maturity = space.kscience.gradle.Maturity.PROTOTYPE
|
||||||
api("org.yaml:snakeyaml:1.26")
|
description ="""
|
||||||
|
YAML meta converters and Front Matter envelope format
|
||||||
|
""".trimIndent()
|
||||||
}
|
}
|
||||||
|
@ -0,0 +1,97 @@
|
|||||||
|
package space.kscience.dataforge.io.yaml
|
||||||
|
|
||||||
|
import kotlinx.io.Sink
|
||||||
|
import kotlinx.io.Source
|
||||||
|
import kotlinx.io.bytestring.ByteString
|
||||||
|
import kotlinx.io.bytestring.encodeToByteString
|
||||||
|
import kotlinx.io.readByteString
|
||||||
|
import kotlinx.io.writeString
|
||||||
|
import space.kscience.dataforge.context.Context
|
||||||
|
import space.kscience.dataforge.context.Global
|
||||||
|
import space.kscience.dataforge.io.*
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.names.Name
|
||||||
|
import space.kscience.dataforge.names.plus
|
||||||
|
|
||||||
|
/**
 * [EnvelopeFormat] for "front matter" envelopes: a textual meta block fenced by
 * `---` lines (as in Jekyll/Hugo markdown), followed by the raw data payload.
 *
 * The opening fence may carry a meta-format short name suffix (e.g. `---json`);
 * when the suffix is absent or unresolvable, YAML is assumed.
 */
public class FrontMatterEnvelopeFormat(
    private val io: IOPlugin,
    private val meta: Meta = Meta.EMPTY,
    private val metaFormatFactory: MetaFormatFactory = YamlMetaFormat,
) : EnvelopeFormat {

    override fun readFrom(binary: Binary): Envelope = binary.read {
        // Number of header bytes consumed so far; the data view starts right after.
        var offset = 0

        // Skip bytes up to and including the opening separator. atMost bounds the
        // scan so a stream without front matter fails fast.
        offset += discardWithSeparator(
            SEPARATOR,
            atMost = 1024,
        )

        // The remainder of the fence line may name an explicit meta format.
        val line = ByteArray {
            offset += readWithSeparatorTo(this, "\n".encodeToByteString())
        }.decodeToString()

        val readMetaFormat = line.trim().takeIf { it.isNotBlank() }?.let { io.resolveMetaFormat(it) } ?: YamlMetaFormat

        // Everything until the closing separator is the serialized meta payload.
        val packet = ByteArray {
            offset += readWithSeparatorTo(this, SEPARATOR)
        }

        // Drop the remainder of the closing fence line.
        offset += discardLine()

        val meta = readMetaFormat.readFrom(packet.asBinary())
        Envelope(meta, binary.view(offset))
    }

    override fun readFrom(source: Source): Envelope = readFrom(source.readBinary())

    override fun writeTo(
        sink: Sink,
        obj: Envelope,
    ) {
        val metaFormat = metaFormatFactory.build(io.context, meta)
        // YAML is the default format, so its name is omitted from the fence.
        val formatSuffix = if (metaFormat is YamlMetaFormat) "" else metaFormatFactory.shortName
        // Fix: interpolating the ByteString SEPARATOR ("$SEPARATOR") wrote its
        // debug representation (ByteString.toString()), not the literal "---" fence.
        sink.writeString("---${formatSuffix}\r\n")
        metaFormat.run { metaFormat.writeTo(sink, obj.meta) }
        sink.writeString("---\r\n")
        // Write the data payload, if any, immediately after the closing fence.
        obj.data?.let { data ->
            sink.writeBinary(data)
        }
    }

    public companion object : EnvelopeFormatFactory {
        /** The `---` fence as raw bytes, used for byte-level scanning. */
        public val SEPARATOR: ByteString = "---".encodeToByteString()

        // NOTE(review): currently unused — the fence suffix is parsed manually in
        // readFrom. Documents the fence syntax; candidate for removal.
        private val metaTypeRegex = "---(\\w*)\\s*".toRegex()

        override val name: Name = EnvelopeFormatFactory.ENVELOPE_FACTORY_NAME + "frontMatter"

        override fun build(context: Context, meta: Meta): EnvelopeFormat {
            return FrontMatterEnvelopeFormat(context.io, meta)
        }

        override fun peekFormat(io: IOPlugin, binary: Binary): EnvelopeFormat? = binary.read {
            // Compare raw bytes to avoid UTF decoding issues.
            val line = readByteString(3)
            return@read if (line == SEPARATOR) {
                default
            } else {
                null
            }
        }

        // Shared instance bound to the Global context, backing the factory-level
        // read/write shortcuts below.
        private val default by lazy { build(Global, Meta.EMPTY) }

        override fun readFrom(binary: Binary): Envelope = default.readFrom(binary)

        override fun writeTo(
            sink: Sink,
            obj: Envelope,
        ): Unit = default.writeTo(sink, obj)

        override fun readFrom(source: Source): Envelope = default.readFrom(source)
    }
}
|
@ -0,0 +1,118 @@
|
|||||||
|
package space.kscience.dataforge.io.yaml
|
||||||
|
|
||||||
|
import kotlinx.io.Sink
|
||||||
|
import kotlinx.io.Source
|
||||||
|
import kotlinx.io.readString
|
||||||
|
import kotlinx.io.writeString
|
||||||
|
import net.mamoe.yamlkt.*
|
||||||
|
import space.kscience.dataforge.context.Context
|
||||||
|
import space.kscience.dataforge.io.MetaFormat
|
||||||
|
import space.kscience.dataforge.io.MetaFormatFactory
|
||||||
|
import space.kscience.dataforge.meta.*
|
||||||
|
import space.kscience.dataforge.meta.descriptors.MetaDescriptor
|
||||||
|
import space.kscience.dataforge.meta.descriptors.get
|
||||||
|
import space.kscience.dataforge.names.NameToken
|
||||||
|
import space.kscience.dataforge.names.withIndex
|
||||||
|
import kotlin.collections.component1
|
||||||
|
import kotlin.collections.component2
|
||||||
|
import kotlin.collections.set
|
||||||
|
|
||||||
|
/**
 * Convert this [Meta] tree to a yamlkt [YamlMap]. Leaf items become their raw
 * scalar values; branch items are converted recursively.
 */
public fun Meta.toYaml(): YamlMap = YamlMap(
    items.entries.associate { (token, child) ->
        val payload: Any? = if (child.isLeaf) child.value?.value else child.toYaml()
        token.toString() to payload
    }
)
|
||||||
|
|
||||||
|
/**
 * A read-only [Meta] view over a yamlkt [YamlMap].
 *
 * Items are rebuilt on every [items] access; the wrapped map is assumed not to
 * change after construction.
 */
private class YamlMeta(private val yamlMap: YamlMap, private val descriptor: MetaDescriptor? = null) : Meta {

    override val value: Value?
        // NOTE(review): passes a null key — presumably resolves the node's own
        // scalar value; confirm against yamlkt's getStringOrNull contract.
        get() = yamlMap.getStringOrNull(null)?.let { Value.parse(it) }

    // Materialize the children of this node as Meta items.
    private fun buildItems(): Map<NameToken, Meta> {
        val map = LinkedHashMap<NameToken, Meta>()

        yamlMap.content.entries.forEach { (key, value) ->
            val stringKey = key.toString()
            val itemDescriptor = descriptor?.get(stringKey)
            val token = NameToken(stringKey)
            when (value) {
                // Fix: the null entry was previously created but never stored,
                // so explicit YAML nulls were silently dropped from the tree.
                YamlNull -> map[token] = Meta(Null)
                is YamlLiteral -> map[token] = Meta(Value.parse(value.content))
                is YamlMap -> map[token] = value.toMeta()
                is YamlList -> if (value.all { it is YamlLiteral }) {
                    // A list of scalars collapses into a single list value.
                    val listValue = ListValue(
                        value.map {
                            //We already checked that all values are primitives
                            Value.parse((it as YamlLiteral).content)
                        }
                    )
                    map[token] = Meta(listValue)
                } else value.forEachIndexed { index, yamlElement ->
                    // Lists of nodes become indexed tokens; the index comes from
                    // the descriptor's index key when the element provides it.
                    val indexKey = itemDescriptor?.indexKey
                    val indexValue: String = (yamlElement as? YamlMap)?.getStringOrNull(indexKey)
                        ?: index.toString() //In case index is non-string, the backward transformation will be broken.

                    val tokenWithIndex = token.withIndex(indexValue)
                    map[tokenWithIndex] = yamlElement.toMeta(itemDescriptor)
                }
            }
        }
        return map
    }

    override val items: Map<NameToken, Meta> get() = buildItems()

    override fun toString(): String = Meta.toString(this)
    override fun equals(other: Any?): Boolean = Meta.equals(this, other as? Meta)
    override fun hashCode(): Int = Meta.hashCode(this)
}
|
||||||
|
|
||||||
|
/**
 * Convert a yamlkt [YamlElement] to [Meta]. [descriptor] is forwarded when the
 * element is a list, to resolve index keys for its items.
 */
public fun YamlElement.toMeta(descriptor: MetaDescriptor? = null): Meta {
    return when (this) {
        YamlNull -> Meta(Null)
        is YamlLiteral -> Meta(Value.parse(content))
        is YamlMap -> toMeta()
        is YamlList -> {
            // A Meta node can't be a bare list, so wrap the list in a synthetic
            // single-key top-level node.
            YamlMap(mapOf("@yamlArray" to this)).toMeta(descriptor)
        }
    }
}
|
||||||
|
|
||||||
|
/** Wrap this [YamlMap] in a [Meta] view that materializes items on access, without copying the tree. */
public fun YamlMap.toMeta(): Meta = YamlMeta(this)
|
||||||
|
|
||||||
|
|
||||||
|
/**
 * Represent meta as Yaml.
 *
 * Serializes [Meta] trees to YAML text and back using the yamlkt library.
 */
public class YamlMetaFormat(private val meta: Meta) : MetaFormat {

    override fun writeMeta(sink: Sink, meta: Meta, descriptor: MetaDescriptor?) {
        // Convert to a yamlkt tree first, then serialize it as a YAML document.
        sink.writeString(Yaml.encodeToString(YamlMap.serializer(), meta.toYaml()))
    }

    override fun readMeta(source: Source, descriptor: MetaDescriptor?): Meta =
        Yaml.decodeYamlMapFromString(source.readString()).toMeta()

    public companion object : MetaFormatFactory {
        override val shortName: String = "yaml"

        override val key: Short = 0x594d //YM

        // Shared no-meta instance backing the factory-level shortcuts below.
        private val default = YamlMetaFormat(Meta.EMPTY)

        override fun build(context: Context, meta: Meta): MetaFormat = YamlMetaFormat(meta)

        override fun writeMeta(sink: Sink, meta: Meta, descriptor: MetaDescriptor?): Unit =
            default.writeMeta(sink, meta, descriptor)

        override fun readMeta(source: Source, descriptor: MetaDescriptor?): Meta =
            default.readMeta(source, descriptor)
    }
}
|
@ -0,0 +1,30 @@
|
|||||||
|
package space.kscience.dataforge.io.yaml
|
||||||
|
|
||||||
|
import space.kscience.dataforge.context.AbstractPlugin
|
||||||
|
import space.kscience.dataforge.context.Context
|
||||||
|
import space.kscience.dataforge.context.PluginFactory
|
||||||
|
import space.kscience.dataforge.context.PluginTag
|
||||||
|
import space.kscience.dataforge.io.EnvelopeFormatFactory
|
||||||
|
import space.kscience.dataforge.io.IOPlugin
|
||||||
|
import space.kscience.dataforge.io.MetaFormatFactory
|
||||||
|
import space.kscience.dataforge.meta.Meta
|
||||||
|
import space.kscience.dataforge.names.Name
|
||||||
|
import space.kscience.dataforge.names.asName
|
||||||
|
|
||||||
|
/**
 * A context plugin that registers the YAML-based IO formats: [YamlMetaFormat]
 * for meta serialization and [FrontMatterEnvelopeFormat] for envelopes.
 */
public class YamlPlugin(meta: Meta) : AbstractPlugin(meta) {

    /** The IO plugin this plugin extends with YAML formats. */
    public val io: IOPlugin by require(IOPlugin)

    override val tag: PluginTag get() = Companion.tag

    override fun content(target: String): Map<Name, Any> {
        return when (target) {
            MetaFormatFactory.META_FORMAT_TYPE -> mapOf("yaml".asName() to YamlMetaFormat)
            EnvelopeFormatFactory.ENVELOPE_FORMAT_TYPE ->
                mapOf(FrontMatterEnvelopeFormat.name to FrontMatterEnvelopeFormat)
            else -> super.content(target)
        }
    }

    public companion object : PluginFactory<YamlPlugin> {
        override val tag: PluginTag = PluginTag("io.yaml", group = PluginTag.DATAFORGE_GROUP)

        override fun build(context: Context, meta: Meta): YamlPlugin = YamlPlugin(meta)
    }
}
|
@ -0,0 +1,37 @@
|
|||||||
|
@file:OptIn(DFExperimental::class)
|
||||||
|
|
||||||
|
package space.kscience.dataforge.io.yaml
|
||||||
|
|
||||||
|
import space.kscience.dataforge.context.Context
|
||||||
|
import space.kscience.dataforge.io.io
|
||||||
|
import space.kscience.dataforge.io.readEnvelope
|
||||||
|
import space.kscience.dataforge.io.toByteArray
|
||||||
|
import space.kscience.dataforge.meta.get
|
||||||
|
import space.kscience.dataforge.meta.string
|
||||||
|
import space.kscience.dataforge.misc.DFExperimental
|
||||||
|
import kotlin.test.Test
|
||||||
|
import kotlin.test.assertEquals
|
||||||
|
|
||||||
|
// Integration test: reads a front-matter document end to end through the IO plugin.
internal class FrontMatterEnvelopeFormatTest {

    // Context with YamlPlugin installed so the front-matter envelope format is
    // registered with the IO plugin.
    val context = Context {
        plugin(YamlPlugin)
    }

    @Test
    fun frontMatter(){
        val text = """
            ---
            content_type: magprog
            magprog_section: contacts
            section_title: Контакты
            language: ru
            ---
            Some text here
        """.trimIndent()

        val envelope = context.io.readEnvelope(text)
        // Data is everything after the closing fence.
        assertEquals("Some text here", envelope.data!!.toByteArray().decodeToString().trim())
        // Meta fields come from the YAML front matter.
        assertEquals("magprog", envelope.meta["content_type"].string)
    }
}
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user