Compare commits

...

99 Commits

Author SHA1 Message Date
c754dc3471 Merge pull request '0.7.0' (!79) from dev into master
Reviewed-on: #79
2023-11-26 10:08:10 +03:00
8f7d754301 Update gradle 2023-11-26 10:07:39 +03:00
c923c3e7d3 Merge remote-tracking branch 'spc/master' into dev 2023-11-26 10:06:39 +03:00
99b2d941c8 Update Readme and CHANGELOG 2023-11-26 10:05:19 +03:00
191af77f57 0.7.0 2023-11-26 09:59:28 +03:00
79759c5256 Replace MetaProvider::getMeta by get 2023-11-22 17:06:23 +03:00
2eb965e563 Type -> DfId
Add descriptor to MetaConverter
2023-11-18 22:48:10 +03:00
1b29e377ca Type -> DfId
Add descriptor to MetaConverter
2023-11-18 22:43:53 +03:00
2634a19285 deprecate String.parseValue 2023-11-12 14:26:28 +03:00
261c415d3d Meta converters return non-nullables 2023-10-29 21:39:40 +03:00
e52d509c2b Add Name.last extension 2023-10-29 10:48:33 +03:00
706521a6b6 Suppress manual cast warnings in task builders 2023-10-24 10:52:41 +03:00
94000689da Merge branch 'beta/kotlin-1.9.20' into dev
# Conflicts:
#	CHANGELOG.md
#	build.gradle.kts
#	dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt
#	dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
#	gradle.properties
2023-10-24 09:04:37 +03:00
851fdda311 Fix memory leak in sealed builder. Fine-grained Meta builders. 2023-10-08 18:21:04 +03:00
cbbcd18df3 Refactor io. 2023-10-08 18:20:23 +03:00
dc2bf5da83 Merge branch 'kotlinx-io' into dev
# Conflicts:
#	dataforge-io/build.gradle.kts
#	dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/JsonMetaFormat.kt
#	gradle.properties
2023-10-08 16:55:47 +03:00
f732b85cc5 kotlin 1.9.20 2023-09-16 15:20:31 +03:00
259b882e63 kotlin 1.9.20 2023-09-13 12:19:20 +03:00
526f230300 Merge branch 'kotlinx-io' into beta/kotlin-1.9.20
# Conflicts:
#	dataforge-io/build.gradle.kts
#	dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/JsonMetaFormat.kt
#	gradle.properties
2023-09-13 08:28:43 +03:00
SPC-code
7fca5db390
Merge pull request #78 from SciProgCentre/dependabot/github_actions/dot-github/workflows/gradle/gradle-build-action-2.4.2
Bump gradle/gradle-build-action from 2.1.5 to 2.4.2 in /.github/workflows
2023-07-29 13:15:37 +03:00
dependabot[bot]
be160ba98a
Bump gradle/gradle-build-action in /.github/workflows
Bumps [gradle/gradle-build-action](https://github.com/gradle/gradle-build-action) from 2.1.5 to 2.4.2.
- [Release notes](https://github.com/gradle/gradle-build-action/releases)
- [Commits](https://github.com/gradle/gradle-build-action/compare/v2.1.5...v2.4.2)

---
updated-dependencies:
- dependency-name: gradle/gradle-build-action
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-07-29 10:14:01 +00:00
SPC-code
182f206b88
Merge pull request #77 from SciProgCentre/dev
0.6.2
2023-07-29 13:13:38 +03:00
3806f97c77 update documentation 2023-07-29 13:00:17 +03:00
d5ebef404f update documentation 2023-07-29 12:59:55 +03:00
3644533043 update documentation 2023-07-29 12:56:35 +03:00
ee5afcdafe Update meta serialization rules 2023-07-29 12:42:14 +03:00
de476fb273 Fix json serialization. 2023-07-26 16:47:12 +03:00
a136db16ff Refactoring to kotlinx-io complete 2023-07-15 10:49:42 +03:00
a699c36f8e [WIP] Refactoring to kotlinx-io 2023-07-11 23:03:52 +03:00
2aba1b48dc [WIP] Refactoring to kotlinx-io 2023-07-10 10:06:39 +03:00
cfa20eedba Update version to 1.9 2023-07-09 21:38:33 +03:00
SPC-code
f78f0f814f
Create LICENSE 2023-07-02 14:34:52 +03:00
4e7ead0763 Merge remote-tracking branch 'space/master' into dev
# Conflicts:
#	gradle.properties
2023-03-31 14:14:34 +03:00
4543648cda Fix SchemeMeta.attach 2023-03-31 14:00:55 +03:00
b6949310ea Prepare for 0.6.1 release 2023-03-31 09:38:34 +03:00
2c2f33427a Fixes in Envelope format and data tree 2023-03-27 09:45:51 +03:00
29fa30fb51 add type checks for inMemoryCache 2023-03-20 18:34:03 +03:00
f3afb5e9fe File-based workspace caching 2023-03-20 17:53:40 +03:00
61c8df9eb0 Minor update to workspace cache 2023-03-09 10:56:35 +03:00
707b59e6fc Update version 2023-02-24 10:05:01 +03:00
add400b324 Update SPC deployment address 2023-02-20 18:37:24 +03:00
58c5355e25 Fix bug in plugin isAttached 2023-02-20 18:31:56 +03:00
f83b759e75 Optimize Name::cutFirst and Name::cutLast 2023-02-18 19:50:09 +03:00
7d88f828d7 Move to gradle plugin 0.14 2023-02-18 19:49:41 +03:00
5d7ddb4e00 Simplify DFTL. fix io bugs 2023-01-25 18:56:19 +03:00
82838b6a92 Workspace cache 2023-01-08 12:44:31 +03:00
e41fdfc086
Fix naming conventions in names 2023-01-08 11:03:58 +03:00
4117a05df4
Add in-memory caching for workspaces 2022-10-03 20:36:28 +03:00
5406a6a64c
Optimize Name hashCode 2022-08-23 10:58:09 +03:00
0cc4dc0db7
Optimize Name hashCode 2022-08-23 10:55:00 +03:00
233639f0b6
Optimize work with names and tokens 2022-08-22 14:16:16 +03:00
70bd92f019
Add debug level to JS logger 2022-08-14 15:09:51 +03:00
f8eea45ed0
add readOnly to the descriptor 2022-08-13 19:16:18 +03:00
e14c0a695e
fix withDefault 2022-08-11 17:59:31 +03:00
3c1fe23366
Values package merged into meta 2022-08-08 15:09:16 +03:00
81e2ad06cc
refactor MutableMeta set method to accept wider range of receivers 2022-08-01 18:12:57 +03:00
6ca76cff17
Update output types of DataTree builders 2022-07-02 19:34:58 +03:00
9a24e1e392
Update output types of DataTree builders 2022-07-02 12:36:41 +03:00
9cceb44a90
Add NameToken.toStringUnescaped 2022-06-24 18:44:36 +03:00
0b68c1edae
minor API update 2022-06-22 20:00:27 +03:00
b8869570ce
refactor file reading 2022-05-25 19:00:12 +03:00
4833128857
refactor file reading 2022-05-21 11:08:59 +03:00
6bd8a7acbc
return type to IOReader 2022-05-21 10:38:53 +03:00
f5d32ba511
Update build version 2022-05-17 12:24:15 +03:00
0fc2198832
- Remove all unnecessary properties for IOFormat
- Separate interfaces for `IOReader` and `IOWriter`
2022-05-16 18:57:48 +03:00
a546552540
Replace sequences by iterators in DataSet 2022-05-10 14:45:58 +03:00
fe92e8fccf
Data traversal refactoring 2022-05-08 20:56:14 +03:00
7d9189e15c Replace T by Pair<Meta, T> in data reducers 2022-05-07 18:06:55 +03:00
0622bacc4d
Refactor DataSet. Remove suspends where it is possible. 2022-05-04 17:27:56 +03:00
bedab0dc86
Remove experimental flag from YAML 2022-05-03 17:42:00 +03:00
f0820a3bed
Reify types for action builders 2022-05-01 20:23:37 +03:00
665f317e4e
Remove obsolete getData for DataSet 2022-05-01 19:29:13 +03:00
82d37f4b55
Fix text envelope formats partial reads 2022-04-29 18:35:44 +03:00
6d396368b7
Fix meta file name 2022-04-24 14:44:31 +03:00
77857289f0
DataSet flow to sequence 2022-04-24 09:57:33 +03:00
eaa9d40d60
Change suspend DataSet.getData to operator DataSet.get 2022-04-24 09:19:14 +03:00
6b41163ed3
Fix select.kt 2022-04-17 22:21:11 +03:00
e5000171f1
move to Kotlin 1.6.20 and KTor 2.0 2022-04-17 21:59:09 +03:00
3c6bc15716
move to Kotlin 1.6.20 and KTor 2.0 2022-04-15 18:56:00 +03:00
11143e4ba1
0.5.3-dev-4 2022-03-07 16:12:01 +03:00
91621864c2
add specOrNull delegate 2021-12-18 17:02:17 +03:00
1e97165328
Refactory Factory 2021-12-12 17:58:01 +03:00
Alexander Nozik
be8e971436
all platforms for macos publication 2021-12-12 11:13:35 +03:00
Alexander Nozik
9cc30b1f4e
disable sonatype publishing 2021-12-12 10:48:52 +03:00
Alexander Nozik
7414e60192
Update publish.yml
force disable github publishing
2021-11-30 14:42:00 +03:00
Alexander Nozik
8c0bc05a9a
Merge pull request #74 from mipt-npm/dev
0.5.2
2021-11-30 13:12:20 +03:00
c480cd8e4d
Fix native test 2021-11-30 12:53:52 +03:00
64e0c554cc 0.5.2 release 2021-11-30 11:58:23 +03:00
532e0c253b Add Yaml meta format plugin 2021-11-27 13:36:07 +03:00
c423dc214e Some name refactoring 2021-11-24 20:26:29 +03:00
d178c4ff0d Fix number comparison bug in value 2021-11-02 10:50:38 +03:00
Alexander Nozik
387ab8747e
Merge pull request #72
Kotlin 1.6
2021-10-10 14:38:02 +03:00
3f54eee578 Workaround for https://youtrack.jetbrains.com/issue/KT-48988. Smart building of child contexts 2021-10-10 14:32:32 +03:00
aded38254e Workspace and task updates 2021-10-09 10:49:38 +03:00
00d964eef3 Workspace and task updates 2021-10-08 09:54:11 +03:00
b07d281a83 Kotlin 1.6 2021-09-29 11:08:55 +03:00
81cdd38c40 Build 2021-09-28 12:26:50 +00:00
0ad6852e36 Fix meta node listeners and attachments 2021-08-15 13:20:54 +03:00
a71bb732da Fix meta node listeners and attachments 2021-08-14 18:14:37 +03:00
170 changed files with 4912 additions and 2830 deletions

View File

@ -1,28 +1,31 @@
name: Dokka publication
on:
push:
branches: [ master ]
workflow_dispatch:
release:
types: [ created ]
jobs:
build:
runs-on: ubuntu-20.04
runs-on: ubuntu-latest
timeout-minutes: 40
steps:
- uses: actions/checkout@v2
- uses: DeLaGuardo/setup-graalvm@4.0
- uses: actions/checkout@v3.0.0
- uses: actions/setup-java@v3.0.0
with:
graalvm: 21.2.0
java: java11
arch: amd64
- uses: actions/cache@v2
java-version: 11
distribution: liberica
- name: Cache konan
uses: actions/cache@v3.0.1
with:
path: ~/.gradle/caches
path: ~/.konan
key: ${{ runner.os }}-gradle-${{ hashFiles('*.gradle.kts') }}
restore-keys: |
${{ runner.os }}-gradle-
- run: ./gradlew dokkaHtmlMultiModule --build-cache --no-daemon --no-parallel --stacktrace
- uses: JamesIves/github-pages-deploy-action@4.1.0
- uses: gradle/gradle-build-action@v2.4.2
with:
arguments: dokkaHtmlMultiModule --no-parallel
- uses: JamesIves/github-pages-deploy-action@v4.3.0
with:
branch: gh-pages
folder: build/dokka/htmlMultiModule

View File

@ -14,25 +14,13 @@ jobs:
os: [ macOS-latest, windows-latest ]
runs-on: ${{matrix.os}}
steps:
- name: Checkout the repo
uses: actions/checkout@v2
- name: Set up JDK 11
uses: DeLaGuardo/setup-graalvm@4.0
- uses: actions/checkout@v3.0.0
- uses: actions/setup-java@v3.10.0
with:
graalvm: 21.2.0
java: java11
arch: amd64
- name: Cache gradle
uses: actions/cache@v2
with:
path: |
~/.gradle/caches
~/.gradle/wrapper
key: ${{ runner.os }}-gradle-${{ hashFiles('*.gradle.kts') }}
restore-keys: |
${{ runner.os }}-gradle-
java-version: 11
distribution: liberica
- name: Cache konan
uses: actions/cache@v2
uses: actions/cache@v3.0.1
with:
path: ~/.konan
key: ${{ runner.os }}-gradle-${{ hashFiles('*.gradle.kts') }}
@ -40,14 +28,23 @@ jobs:
${{ runner.os }}-gradle-
- name: Publish Windows Artifacts
if: matrix.os == 'windows-latest'
shell: cmd
run: >
./gradlew release --no-daemon --build-cache -Ppublishing.enabled=true
-Ppublishing.space.user=${{ secrets.SPACE_APP_ID }}
-Ppublishing.space.token=${{ secrets.SPACE_APP_SECRET }}
uses: gradle/gradle-build-action@v2.4.2
with:
arguments: |
publishAllPublicationsToSpaceRepository
-Ppublishing.targets=all
-Ppublishing.space.user=${{ secrets.SPACE_APP_ID }}
-Ppublishing.space.token=${{ secrets.SPACE_APP_SECRET }}
- name: Publish Mac Artifacts
if: matrix.os == 'macOS-latest'
run: >
./gradlew release --no-daemon --build-cache -Ppublishing.enabled=true -Ppublishing.platform=macosX64
-Ppublishing.space.user=${{ secrets.SPACE_APP_ID }}
-Ppublishing.space.token=${{ secrets.SPACE_APP_SECRET }}
uses: gradle/gradle-build-action@v2.4.2
with:
arguments: |
publishMacosX64PublicationToSpaceRepository
publishMacosArm64PublicationToSpaceRepository
publishIosX64PublicationToSpaceRepository
publishIosArm64PublicationToSpaceRepository
publishIosSimulatorArm64PublicationToSpaceRepository
-Ppublishing.targets=all
-Ppublishing.space.user=${{ secrets.SPACE_APP_ID }}
-Ppublishing.space.token=${{ secrets.SPACE_APP_SECRET }}

View File

@ -1,6 +1,7 @@
# Changelog
## [Unreleased]
## Unreleased
### Added
### Changed
@ -12,11 +13,104 @@
### Fixed
### Security
## [0.5.0]
## 0.7.0 - 2023-11-26
### Added
- Obligatory `type: KType` and `descriptor` property for `MetaConverters`
- Added separate `Meta`, `SealedMeta` and `ObservableMutableMeta` builders.
### Changed
- Meta converter `metaToObject` returns a non-nullable type. Additional method `metaToObjectOrNull` for nullable return.
- Kotlin 1.9.20.
- Migrated from ktor-io to kotlinx-io.
- `MutableMeta` builder now returns a simplified version of meta that does not hold listeners.
- More concise names for read/write methods in IO.
- Remove unnecessary confusion with `get`/`getMeta` by removing `getMeta` from the interface.
### Deprecated
- `String.parseValue` is replaced with `Value.parse`
### Fixed
- Memory leak in SealedMeta builder
## 0.6.2 - 2023-07-29
### Changed
- Meta to Json serializer now serializes a single item with an index as an array. This is important for Plotly integration.
- Meta to Json serializes a Meta that has a value but no children as a literal or array instead of an object with an `@value` field.
## 0.6.1 - 2023-03-31
### Added
- File cache for workspace
- Smart task metadata transformation for workspace
- Add `readOnly` property to descriptors
- Add `specOrNull` delegate to meta and Scheme
- Suspended read methods to the `Binary`
- Synchronously accessed `meta` to all `DataSet`s
- More fine-grained types in Action builders.
### Changed
- `Name::replaceLast` API
- `PluginFactory` no longer requires plugin class
- Collection<Named> toMap -> associateByName
- Simplified `DFTL` envelope format. Closing symbols are unnecessary. Properties are discontinued.
- Meta `get` method allows nullable receiver
- `withDefault` functions do not add new keys to meta children and are consistent.
- `dataforge.meta.values` package is merged into `dataforge.meta` for better star imports
- Kotlin 1.8.20
- `Factory` is now `fun interface` and uses `build` instead of `invoke`. `invoke` moved to an extension.
- KTor 2.0
- DataTree `items` call is blocking.
- DataSet `getData` is no longer suspended and renamed to `get`
- DataSet operates with sequences of data instead of flows
- PartialEnvelope uses `Int` instead of `UInt`.
- `ActiveDataSet` renamed to `DataSource`
- `selectOne`->`getByType`
- Data traversal in `DataSet` is done via iterator
- Remove all unnecessary properties for `IOFormat`
- Separate interfaces for `IOReader` and `IOWriter`
### Deprecated
- Context.fetch -> Context.request
### Fixed
- `readDataDirectory` does not split names with dots
- Front matter reader does not crash on non-UTF files
- Meta file name in readMeta from directory
- Tagless and FrontMatter envelope partial readers fix.
## 0.5.2
### Added
- Yaml plugin
- Partial fix to #53
### Fixed
- MutableMetaImpl attachment and checks
- Listeners in observable meta are replaced by lists
- JS number comparison bug.
## 0.5.0
### Added
- Experimental `listOfSpec` delegate.
### Changed
- **API breaking** Config is deprecated, use `ObservableMeta` instead.
- **API breaking** Descriptor now has a member property `defaultValue` instead of the `defaultItem()` extension. It caches the default value state on the first call, because computing the default on each call is too expensive.
- Kotlin 1.5.10
@ -27,24 +121,28 @@
- **API breaking** Configurable `config` changed to `meta`
### Removed
- `Config`
- Public PluginManager mutability
- Tables and tables-exposed moved to the separate project `tables.kt`
- BinaryMetaFormat. Use CBOR encoding instead
### Fixed
- Proper json array index treatment.
- Proper json index for single-value array.
### Security
## [0.4.0]
## 0.4.0
### Added
- LogManager plugin
- dataforge-context API dependency on SLF4j
- Context `withEnv` and `fetch` methods to manipulate plugins without changing plugins after creation.
- Split `ItemDescriptor` into builder and read-only part
### Changed
- Kotlin-logging moved from common to JVM and JS. Replaced by console for native.
- Package changed to `space.kscience`
- Scheme made observable
@ -54,19 +152,22 @@
- Refactor loggers
### Deprecated
- Direct use of PluginManager
### Removed
- Common dependency on Kotlin-logging
- Kotlinx-io fork dependency. Replaced by Ktor-io.
### Fixed
- Scheme properties properly handle children property change.
### Security
## 0.3.0
## [0.3.0]
### Added
- Yaml meta format based on yaml.kt
- `Path` builders
- Special ValueType for lists
@ -74,6 +175,7 @@
- Multiplatform yaml meta
### Changed
- `ListValue` and `DoubleArrayValue` implement `Iterable`.
- Changed the logic of `Value::isList` to check for type instead of size
- `Meta{}` builder made inline
@ -87,18 +189,10 @@
- \[Major breaking change\] Full refactor of DataTree/DataSource
- \[Major Breaking change\] Replace KClass with KType in data. Remove direct access to constructors with types.
### Deprecated
### Removed
### Fixed
### Security
## [0.2.0]
### Added
## 0.2.0
### Changed
- Context content resolution refactor
- Kotlin 1.4.10 (build tools 0.6.0)
- Empty query in Name is null instead of ""
@ -108,15 +202,16 @@
- Configurable is no longer MutableItemProvider. All functionality moved to Scheme.
### Deprecated
- Context activation API
- TextRenderer
### Removed
- Functional server prototype
- `dataforge-output` module
### Fixed
- Global context CoroutineScope resolution
- Library mode compliance
### Security
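As a reading aid for the 0.7.0 entries above, here is a minimal migration sketch. Only the names quoted in the changelog (`Value.parse`, `String.parseValue`, `metaToObject`, `metaToObjectOrNull`) are taken from it; the import path and the commented converter lines are assumptions, not part of this diff.

```kotlin
// Hedged migration sketch based on the changelog wording above; not part of the diff.
import space.kscience.dataforge.meta.Value // assumed location after the values/meta package merge

// 0.7.0 deprecation: "42".parseValue() is replaced by the companion parser.
val answer: Value = Value.parse("42")

// MetaConverter.metaToObject now returns a non-nullable value; the nullable variant
// is a separate method. The converter instance below is a hypothetical usage:
// val port: Int  = MetaConverter.int.metaToObject(meta)
// val maybePort: Int? = MetaConverter.int.metaToObjectOrNull(meta)
```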

LICENSE (new file, 201 lines)
View File

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -3,47 +3,36 @@
![Gradle build](https://github.com/mipt-npm/dataforge-core/workflows/Gradle%20build/badge.svg)
<hr/>
* ### [dataforge-context](dataforge-context)
>
### [dataforge-context](dataforge-context)
> Context and provider definitions
>
> **Maturity**: DEVELOPMENT
<hr/>
* ### [dataforge-data](dataforge-data)
>
### [dataforge-data](dataforge-data)
>
> **Maturity**: EXPERIMENTAL
<hr/>
* ### [dataforge-io](dataforge-io)
>
### [dataforge-io](dataforge-io)
> IO module
>
> **Maturity**: PROTOTYPE
<hr/>
> **Maturity**: EXPERIMENTAL
* ### [dataforge-meta](dataforge-meta)
>
### [dataforge-meta](dataforge-meta)
> Meta definition and basic operations on meta
>
> **Maturity**: DEVELOPMENT
<hr/>
* ### [dataforge-scripting](dataforge-scripting)
>
### [dataforge-scripting](dataforge-scripting)
>
> **Maturity**: PROTOTYPE
<hr/>
* ### [dataforge-workspace](dataforge-workspace)
>
### [dataforge-workspace](dataforge-workspace)
>
> **Maturity**: EXPERIMENTAL
<hr/>
* ### [dataforge-io-yaml](dataforge-io/dataforge-io-yaml)
### [dataforge-io/dataforge-io-yaml](dataforge-io/dataforge-io-yaml)
> YAML meta converters and Front Matter envelope format
>
> **Maturity**: PROTOTYPE
<hr/>

View File

@ -1,14 +1,24 @@
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
import space.kscience.gradle.useApache2Licence
import space.kscience.gradle.useSPCTeam
plugins {
id("ru.mipt.npm.gradle.project")
id("space.kscience.gradle.project")
}
allprojects {
group = "space.kscience"
version = "0.5.0"
version = "0.7.0"
}
subprojects {
apply(plugin = "maven-publish")
tasks.withType<KotlinCompile> {
kotlinOptions {
freeCompilerArgs = freeCompilerArgs + "-Xcontext-receivers"
}
}
}
readme {
@ -16,14 +26,14 @@ readme {
}
ksciencePublish {
github("dataforge-core")
space("https://maven.pkg.jetbrains.space/mipt-npm/p/sci/maven")
pom("https://github.com/SciProgCentre/kmath") {
useApache2Licence()
useSPCTeam()
}
repository("spc","https://maven.sciprog.center/kscience")
sonatype()
}
apiValidation {
if(project.version.toString().contains("dev")) {
validationDisabled = true
}
nonPublicMarkers.add("space.kscience.dataforge.misc.DFExperimental")
}

View File

@ -0,0 +1,23 @@
# Module dataforge-context
Context and provider definitions
## Usage
## Artifact:
The Maven coordinates of this project are `space.kscience:dataforge-context:0.7.0`.
**Gradle Kotlin DSL:**
```kotlin
repositories {
maven("https://repo.kotlin.link")
//uncomment to access development builds
//maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
mavenCentral()
}
dependencies {
implementation("space.kscience:dataforge-context:0.7.0")
}
```

View File

@ -3,16 +3,17 @@ public abstract class space/kscience/dataforge/context/AbstractPlugin : space/ks
public fun <init> (Lspace/kscience/dataforge/meta/Meta;)V
public synthetic fun <init> (Lspace/kscience/dataforge/meta/Meta;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
public fun attach (Lspace/kscience/dataforge/context/Context;)V
public final fun dependsOn ()Ljava/util/Map;
public fun dependsOn ()Ljava/util/Map;
public fun detach ()V
public fun getContext ()Lspace/kscience/dataforge/context/Context;
public fun getMeta ()Lspace/kscience/dataforge/meta/Meta;
protected final fun require (Lspace/kscience/dataforge/context/PluginFactory;Lspace/kscience/dataforge/meta/Meta;)Lkotlin/properties/ReadOnlyProperty;
public static synthetic fun require$default (Lspace/kscience/dataforge/context/AbstractPlugin;Lspace/kscience/dataforge/context/PluginFactory;Lspace/kscience/dataforge/meta/Meta;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
public fun isAttached ()Z
protected final fun require (Lspace/kscience/dataforge/context/PluginFactory;Lkotlin/reflect/KClass;Lspace/kscience/dataforge/meta/Meta;)Lkotlin/properties/ReadOnlyProperty;
public static synthetic fun require$default (Lspace/kscience/dataforge/context/AbstractPlugin;Lspace/kscience/dataforge/context/PluginFactory;Lkotlin/reflect/KClass;Lspace/kscience/dataforge/meta/Meta;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
}
public final class space/kscience/dataforge/context/AbstractPluginKt {
public static final fun toMap (Ljava/util/Collection;)Ljava/util/Map;
public static final fun associateByName (Ljava/util/Collection;)Ljava/util/Map;
}
public final class space/kscience/dataforge/context/ClassLoaderPlugin : space/kscience/dataforge/context/AbstractPlugin {
@ -33,8 +34,8 @@ public final class space/kscience/dataforge/context/ClassLoaderPluginKt {
public class space/kscience/dataforge/context/Context : kotlinx/coroutines/CoroutineScope, space/kscience/dataforge/meta/MetaRepr, space/kscience/dataforge/misc/Named, space/kscience/dataforge/provider/Provider {
public static final field Companion Lspace/kscience/dataforge/context/Context$Companion;
public static final field PROPERTY_TARGET Ljava/lang/String;
public final fun buildContext (Ljava/lang/String;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/context/Context;
public static synthetic fun buildContext$default (Lspace/kscience/dataforge/context/Context;Ljava/lang/String;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/context/Context;
public final fun buildContext (Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/context/Context;
public static synthetic fun buildContext$default (Lspace/kscience/dataforge/context/Context;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/context/Context;
public fun close ()V
public fun content (Ljava/lang/String;)Ljava/util/Map;
public final fun content (Ljava/lang/String;Z)Ljava/util/Map;
@ -57,7 +58,6 @@ public abstract interface class space/kscience/dataforge/context/ContextAware {
public final class space/kscience/dataforge/context/ContextBuilder {
public final fun build ()Lspace/kscience/dataforge/context/Context;
public final fun getName ()Lspace/kscience/dataforge/names/Name;
public final fun name (Ljava/lang/String;)V
public final fun plugin (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Lkotlin/jvm/functions/Function1;)V
public final fun plugin (Lspace/kscience/dataforge/context/Plugin;)V
public final fun plugin (Lspace/kscience/dataforge/context/PluginFactory;Lkotlin/jvm/functions/Function1;)V
@ -67,11 +67,9 @@ public final class space/kscience/dataforge/context/ContextBuilder {
public static synthetic fun plugin$default (Lspace/kscience/dataforge/context/ContextBuilder;Lspace/kscience/dataforge/context/PluginFactory;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
public static synthetic fun plugin$default (Lspace/kscience/dataforge/context/ContextBuilder;Lspace/kscience/dataforge/context/PluginTag;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
public final fun properties (Lkotlin/jvm/functions/Function1;)V
public final fun setName (Lspace/kscience/dataforge/names/Name;)V
}
public final class space/kscience/dataforge/context/ContextBuilderKt {
public static final fun withEnv (Lspace/kscience/dataforge/context/Context;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/context/Context;
}
public final class space/kscience/dataforge/context/DefaultLogManager : space/kscience/dataforge/context/AbstractPlugin, space/kscience/dataforge/context/LogManager {
@ -83,14 +81,17 @@ public final class space/kscience/dataforge/context/DefaultLogManager : space/ks
}
public final class space/kscience/dataforge/context/DefaultLogManager$Companion : space/kscience/dataforge/context/PluginFactory {
public synthetic fun build (Lspace/kscience/dataforge/context/Context;Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
public fun build (Lspace/kscience/dataforge/context/Context;Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/context/DefaultLogManager;
public fun getTag ()Lspace/kscience/dataforge/context/PluginTag;
public fun getType ()Lkotlin/reflect/KClass;
public synthetic fun invoke (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/context/Context;)Ljava/lang/Object;
public fun invoke (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/context/Context;)Lspace/kscience/dataforge/context/DefaultLogManager;
}
public abstract interface class space/kscience/dataforge/context/Factory {
public abstract fun invoke (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/context/Context;)Ljava/lang/Object;
public abstract fun build (Lspace/kscience/dataforge/context/Context;Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
}
public final class space/kscience/dataforge/context/FactoryKt {
public static final fun invoke (Lspace/kscience/dataforge/context/Factory;Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/context/Context;)Ljava/lang/Object;
public static synthetic fun invoke$default (Lspace/kscience/dataforge/context/Factory;Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/context/Context;ILjava/lang/Object;)Ljava/lang/Object;
}
@ -145,6 +146,7 @@ public abstract interface class space/kscience/dataforge/context/Plugin : space/
public abstract fun getMeta ()Lspace/kscience/dataforge/meta/Meta;
public fun getName ()Lspace/kscience/dataforge/names/Name;
public abstract fun getTag ()Lspace/kscience/dataforge/context/PluginTag;
public abstract fun isAttached ()Z
public fun toMeta ()Lspace/kscience/dataforge/meta/Meta;
}
@ -152,11 +154,26 @@ public final class space/kscience/dataforge/context/Plugin$Companion {
public static final field TARGET Ljava/lang/String;
}
public final class space/kscience/dataforge/context/PluginBuilder {
public fun <init> (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V
public synthetic fun <init> (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
public final fun build ()Lspace/kscience/dataforge/context/PluginFactory;
public final fun getTag ()Lspace/kscience/dataforge/context/PluginTag;
public final fun provides (Ljava/lang/String;Ljava/util/Map;)V
public final fun provides (Ljava/lang/String;[Lspace/kscience/dataforge/misc/Named;)V
public final fun requires (Lspace/kscience/dataforge/context/PluginFactory;Lspace/kscience/dataforge/meta/Meta;)V
public static synthetic fun requires$default (Lspace/kscience/dataforge/context/PluginBuilder;Lspace/kscience/dataforge/context/PluginFactory;Lspace/kscience/dataforge/meta/Meta;ILjava/lang/Object;)V
}
public final class space/kscience/dataforge/context/PluginBuilderKt {
public static final fun PluginFactory (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/context/PluginFactory;
public static synthetic fun PluginFactory$default (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/context/PluginFactory;
}
public abstract interface class space/kscience/dataforge/context/PluginFactory : space/kscience/dataforge/context/Factory {
public static final field Companion Lspace/kscience/dataforge/context/PluginFactory$Companion;
public static final field TYPE Ljava/lang/String;
public abstract fun getTag ()Lspace/kscience/dataforge/context/PluginTag;
public abstract fun getType ()Lkotlin/reflect/KClass;
}
public final class space/kscience/dataforge/context/PluginFactory$Companion {
@ -166,10 +183,10 @@ public final class space/kscience/dataforge/context/PluginFactory$Companion {
public final class space/kscience/dataforge/context/PluginManager : java/lang/Iterable, kotlin/jvm/internal/markers/KMappedMarker, space/kscience/dataforge/context/ContextAware {
public final fun find (ZLkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/context/Plugin;
public static synthetic fun find$default (Lspace/kscience/dataforge/context/PluginManager;ZLkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/context/Plugin;
public final fun get (Lkotlin/reflect/KClass;Lspace/kscience/dataforge/context/PluginTag;Z)Ljava/lang/Object;
public final fun get (Lspace/kscience/dataforge/context/PluginTag;Z)Lspace/kscience/dataforge/context/Plugin;
public static synthetic fun get$default (Lspace/kscience/dataforge/context/PluginManager;Lkotlin/reflect/KClass;Lspace/kscience/dataforge/context/PluginTag;ZILjava/lang/Object;)Ljava/lang/Object;
public static synthetic fun get$default (Lspace/kscience/dataforge/context/PluginManager;Lspace/kscience/dataforge/context/PluginTag;ZILjava/lang/Object;)Lspace/kscience/dataforge/context/Plugin;
public final fun getByType (Lkotlin/reflect/KClass;Lspace/kscience/dataforge/context/PluginTag;Z)Ljava/lang/Object;
public static synthetic fun getByType$default (Lspace/kscience/dataforge/context/PluginManager;Lkotlin/reflect/KClass;Lspace/kscience/dataforge/context/PluginTag;ZILjava/lang/Object;)Ljava/lang/Object;
public fun getContext ()Lspace/kscience/dataforge/context/Context;
public fun iterator ()Ljava/util/Iterator;
public final fun list (Z)Ljava/util/Collection;
@ -195,14 +212,25 @@ public final class space/kscience/dataforge/context/PluginTag : space/kscience/d
public fun toString ()Ljava/lang/String;
}
public final class space/kscience/dataforge/context/PluginTag$$serializer : kotlinx/serialization/internal/GeneratedSerializer {
public static final field INSTANCE Lspace/kscience/dataforge/context/PluginTag$$serializer;
public fun childSerializers ()[Lkotlinx/serialization/KSerializer;
public synthetic fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Ljava/lang/Object;
public fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Lspace/kscience/dataforge/context/PluginTag;
public fun getDescriptor ()Lkotlinx/serialization/descriptors/SerialDescriptor;
public synthetic fun serialize (Lkotlinx/serialization/encoding/Encoder;Ljava/lang/Object;)V
public fun serialize (Lkotlinx/serialization/encoding/Encoder;Lspace/kscience/dataforge/context/PluginTag;)V
public fun typeParametersSerializers ()[Lkotlinx/serialization/KSerializer;
}
public final class space/kscience/dataforge/context/PluginTag$Companion {
public final fun fromString (Ljava/lang/String;)Lspace/kscience/dataforge/context/PluginTag;
public final fun serializer ()Lkotlinx/serialization/KSerializer;
}
public final class space/kscience/dataforge/context/ResolveKt {
public static final fun gather (Lspace/kscience/dataforge/context/Context;Ljava/lang/String;Lkotlin/reflect/KClass;Z)Ljava/util/Map;
public static synthetic fun gather$default (Lspace/kscience/dataforge/context/Context;Ljava/lang/String;Lkotlin/reflect/KClass;ZILjava/lang/Object;)Ljava/util/Map;
public static synthetic fun gatherInSequence$default (Lspace/kscience/dataforge/context/Context;Ljava/lang/String;Lkotlin/reflect/KClass;ZILjava/lang/Object;)Lkotlin/sequences/Sequence;
public static final fun getValues (Lkotlin/sequences/Sequence;)Lkotlin/sequences/Sequence;
public static final fun resolve (Lspace/kscience/dataforge/context/Context;Ljava/lang/String;Lspace/kscience/dataforge/names/Name;Lkotlin/reflect/KClass;)Ljava/lang/Object;
}
@ -216,10 +244,9 @@ public final class space/kscience/dataforge/context/SlfLogManager : space/kscien
}
public final class space/kscience/dataforge/context/SlfLogManager$Companion : space/kscience/dataforge/context/PluginFactory {
public synthetic fun build (Lspace/kscience/dataforge/context/Context;Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
public fun build (Lspace/kscience/dataforge/context/Context;Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/context/SlfLogManager;
public fun getTag ()Lspace/kscience/dataforge/context/PluginTag;
public fun getType ()Lkotlin/reflect/KClass;
public synthetic fun invoke (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/context/Context;)Ljava/lang/Object;
public fun invoke (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/context/Context;)Lspace/kscience/dataforge/context/SlfLogManager;
}
public final class space/kscience/dataforge/properties/PropertyKt {
@ -229,7 +256,6 @@ public final class space/kscience/dataforge/properties/SchemePropertyKt {
}
public final class space/kscience/dataforge/provider/DfTypeKt {
public static final fun getDfType (Lkotlin/reflect/KClass;)Ljava/lang/String;
}
public final class space/kscience/dataforge/provider/Path : java/lang/Iterable, kotlin/jvm/internal/markers/KMappedMarker {
@ -283,7 +309,8 @@ public final class space/kscience/dataforge/provider/PathToken {
}
public final class space/kscience/dataforge/provider/PathToken$Companion {
public final fun parse (Ljava/lang/String;)Lspace/kscience/dataforge/provider/PathToken;
public final fun parse (Ljava/lang/String;Z)Lspace/kscience/dataforge/provider/PathToken;
public static synthetic fun parse$default (Lspace/kscience/dataforge/provider/PathToken$Companion;Ljava/lang/String;ZILjava/lang/Object;)Lspace/kscience/dataforge/provider/PathToken;
}
public abstract interface class space/kscience/dataforge/provider/Provider {

View File

@ -1,35 +1,24 @@
plugins {
id("ru.mipt.npm.gradle.mpp")
id("ru.mipt.npm.gradle.native")
id("space.kscience.gradle.mpp")
}
description = "Context and provider definitions"
kscience {
jvm()
js()
native()
useCoroutines()
}
kotlin {
sourceSets {
val commonMain by getting{
dependencies {
api(project(":dataforge-meta"))
}
}
jvmMain {
dependencies {
api(kotlin("reflect"))
api("org.slf4j:slf4j-api:1.7.30")
}
}
jsMain {
dependencies {
}
}
useSerialization()
dependencies {
api(project(":dataforge-meta"))
}
dependencies(jvmMain){
api(kotlin("reflect"))
api("org.slf4j:slf4j-api:1.7.30")
}
}
readme{
maturity = ru.mipt.npm.gradle.Maturity.DEVELOPMENT
readme {
maturity = space.kscience.gradle.Maturity.DEVELOPMENT
}

View File

@ -1,6 +1,7 @@
package space.kscience.dataforge.context
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.misc.DFInternal
import space.kscience.dataforge.misc.Named
import space.kscience.dataforge.names.Name
import kotlin.properties.ReadOnlyProperty
@ -11,6 +12,8 @@ public abstract class AbstractPlugin(override val meta: Meta = Meta.EMPTY) : Plu
private var _context: Context? = null
private val dependencies = HashMap<PluginFactory<*>, Meta>()
override val isAttached: Boolean get() = _context != null
override val context: Context
get() = _context ?: error("Plugin $tag is not attached")
@ -22,21 +25,33 @@ public abstract class AbstractPlugin(override val meta: Meta = Meta.EMPTY) : Plu
this._context = null
}
final override fun dependsOn(): Map<PluginFactory<*>, Meta> = dependencies
override fun dependsOn(): Map<PluginFactory<*>, Meta> = dependencies
protected fun <P : Plugin> require(
factory: PluginFactory<P>,
type: KClass<P>,
meta: Meta = Meta.EMPTY,
): ReadOnlyProperty<AbstractPlugin, P> {
dependencies[factory] = meta
return PluginDependencyDelegate(factory, type)
}
/**
* Register plugin dependency and return a delegate which provides lazily initialized reference to dependent plugin
*/
protected fun <P : Plugin> require(factory: PluginFactory<P>, meta: Meta = Meta.EMPTY): ReadOnlyProperty<AbstractPlugin, P> {
dependencies[factory] = meta
return PluginDependencyDelegate(factory.type)
}
protected inline fun <reified P : Plugin> require(
factory: PluginFactory<P>,
meta: Meta = Meta.EMPTY,
): ReadOnlyProperty<AbstractPlugin, P> = require(factory, P::class, meta)
}
public fun <T : Named> Collection<T>.toMap(): Map<Name, T> = associate { it.name to it }
public fun <T : Named> Collection<T>.associateByName(): Map<Name, T> = associate { it.name to it }
private class PluginDependencyDelegate<P : Plugin>(val type: KClass<out P>) : ReadOnlyProperty<AbstractPlugin, P> {
private class PluginDependencyDelegate<P : Plugin>(val factory: PluginFactory<P>, val type: KClass<P>) :
ReadOnlyProperty<AbstractPlugin, P> {
@OptIn(DFInternal::class)
override fun getValue(thisRef: AbstractPlugin, property: KProperty<*>): P {
return thisRef.context.plugins[type] ?: error("Plugin with type $type not found")
if (!thisRef.isAttached) error("Plugin dependency must not be called eagerly during initialization.")
return thisRef.context.plugins.getByType(type, factory.tag) ?: error("Plugin ${factory.tag} not found")
}
}
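A minimal sketch, assuming hypothetical `OtherPlugin` and `MyPlugin` classes, of how the reworked `require` delegate above is meant to be used; only the `require(factory)` shape, `PluginFactory`, and `PluginTag` come from this diff.

```kotlin
import space.kscience.dataforge.context.AbstractPlugin
import space.kscience.dataforge.context.Context
import space.kscience.dataforge.context.PluginFactory
import space.kscience.dataforge.context.PluginTag
import space.kscience.dataforge.meta.Meta

// Hypothetical dependency plugin with a companion factory.
class OtherPlugin : AbstractPlugin() {
    override val tag: PluginTag get() = Companion.tag

    companion object : PluginFactory<OtherPlugin> {
        override val tag: PluginTag = PluginTag("other", group = "com.example")
        override fun build(context: Context, meta: Meta): OtherPlugin = OtherPlugin()
    }
}

// Hypothetical plugin that depends on OtherPlugin.
class MyPlugin : AbstractPlugin() {
    override val tag: PluginTag = PluginTag("my", group = "com.example")

    // Registers the dependency in dependsOn() and resolves it lazily;
    // reading it before the plugin is attached fails, as the delegate above shows.
    val other: OtherPlugin by require(OtherPlugin)
}
```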

View File

@ -4,11 +4,12 @@ import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Job
import kotlinx.coroutines.SupervisorJob
import space.kscience.dataforge.meta.*
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.misc.Named
import space.kscience.dataforge.misc.ThreadSafe
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.provider.Provider
import kotlin.coroutines.CoroutineContext
import kotlin.jvm.Synchronized
/**
* The local environment for anything being done in DataForge framework. Contexts are organized into tree structure with [Global] at the top.
@ -71,16 +72,16 @@ public open class Context internal constructor(
private val childrenContexts = HashMap<Name, Context>()
/**
* Build and register a child context
* Get and validate existing context or build and register a new child context.
* @param name the relative (tail) name of the new context. If null, uses context hash code as a marker.
*/
@Synchronized
public fun buildContext(name: String? = null, block: ContextBuilder.() -> Unit = {}): Context {
val newContext = ContextBuilder(this)
.apply { name?.let { name(it) } }
.apply(block)
.build()
childrenContexts[newContext.name] = newContext
return newContext
@OptIn(DFExperimental::class)
@ThreadSafe
public fun buildContext(name: Name? = null, block: ContextBuilder.() -> Unit = {}): Context {
val existing = name?.let { childrenContexts[name] }
return existing?.modify(block) ?: ContextBuilder(this, name).apply(block).build().also {
childrenContexts[it.name] = it
}
}
/**

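A short sketch of the `Name`-based `buildContext` above; the child name is arbitrary and the commented plugin registration is hypothetical.

```kotlin
import space.kscience.dataforge.context.Global
import space.kscience.dataforge.names.asName

// Reuses a registered child with the same name, or builds and registers a new one.
val child = Global.buildContext("workspace".asName()) {
    // plugin(MyPluginFactory)   // hypothetical plugin registration
}
```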
View File

@ -20,7 +20,7 @@ import kotlin.collections.set
@DFBuilder
public class ContextBuilder internal constructor(
private val parent: Context,
public var name: Name? = null,
public val name: Name? = null,
meta: Meta = Meta.EMPTY,
) {
internal val factories = HashMap<PluginFactory<*>, Meta>()
@ -30,10 +30,6 @@ public class ContextBuilder internal constructor(
meta.action()
}
public fun name(string: String) {
this.name = Name.parse(string)
}
@OptIn(DFExperimental::class)
private fun findPluginFactory(tag: PluginTag): PluginFactory<*> =
parent.gatherInSequence<PluginFactory<*>>(PluginFactory.TYPE).values
@ -72,7 +68,7 @@ public class ContextBuilder internal constructor(
// Add if does not exist
if (existing == null) {
//TODO bypass if parent already has plugin with given meta?
val plugin = factory(meta, parent)
val plugin = factory.build(parent, meta)
for ((depFactory, deoMeta) in plugin.dependsOn()) {
addPlugin(depFactory, deoMeta)
@ -95,19 +91,21 @@ public class ContextBuilder internal constructor(
}
/**
* Check if current context contains all plugins required by the builder and return it it does or forks to a new context
Check if current context contains all plugins required by the builder and return it if it does or forks to a new context
* if it does not.
*/
public fun Context.withEnv(block: ContextBuilder.() -> Unit): Context {
@DFExperimental
public fun Context.modify(block: ContextBuilder.() -> Unit): Context {
fun Context.contains(factory: PluginFactory<*>, meta: Meta): Boolean {
val loaded = plugins[factory.tag] ?: return false
return loaded.meta == meta
}
val builder = ContextBuilder(this, name + "env", properties).apply(block)
val builder = ContextBuilder(this, name + "mod", properties).apply(block)
val requiresFork = builder.factories.any { (factory, meta) ->
!contains(factory, meta)
} || ((properties as Meta) == builder.meta)
return if (requiresFork) builder.build() else this
}
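A sketch of the renamed `modify` extension above (formerly `withEnv`); the plugin factory is hypothetical, and the `DFExperimental` opt-in follows the annotation shown in the diff.

```kotlin
import space.kscience.dataforge.context.Context
import space.kscience.dataforge.context.modify
import space.kscience.dataforge.misc.DFExperimental

// Returns this context unchanged if it already satisfies the builder, otherwise forks a child.
@OptIn(DFExperimental::class)
fun Context.withExtras(): Context = modify {
    // plugin(MyPluginFactory)   // hypothetical plugin registration
}
```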

View File

@ -2,6 +2,11 @@ package space.kscience.dataforge.context
import space.kscience.dataforge.meta.Meta
public interface Factory<out T : Any> {
public operator fun invoke(meta: Meta = Meta.EMPTY, context: Context = Global): T
}
public fun interface Factory<out T> {
public fun build(context: Context, meta: Meta): T
}
public operator fun <T> Factory<T>.invoke(
meta: Meta = Meta.EMPTY,
context: Context = Global,
): T = build(context, meta)
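A sketch of the new `fun interface` contract above with a hypothetical `MyService`; only `Factory`, `build`, and the `invoke` extension (with its `Global`/`Meta.EMPTY` defaults) come from this diff.

```kotlin
import space.kscience.dataforge.context.Context
import space.kscience.dataforge.context.Factory
import space.kscience.dataforge.context.Global
import space.kscience.dataforge.context.invoke
import space.kscience.dataforge.meta.Meta

// Hypothetical service produced by a factory.
class MyService(val context: Context, val meta: Meta)

// SAM conversion: a fun interface needs only the single build(context, meta) method.
val myServiceFactory: Factory<MyService> = Factory { context, meta -> MyService(context, meta) }

fun demo() {
    val explicit = myServiceFactory.build(Global, Meta.EMPTY) // new primary method
    val viaExtension = myServiceFactory()                     // old call style via the invoke extension defaults
}
```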

View File

@ -4,10 +4,11 @@ import kotlinx.coroutines.CoroutineName
import kotlinx.coroutines.Job
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.names.asName
import space.kscience.dataforge.names.parseAsName
import kotlin.coroutines.CoroutineContext
import kotlin.native.concurrent.ThreadLocal
internal expect val globalLoggerFactory: PluginFactory<out LogManager>
internal expect fun getGlobalLoggerFactory(): PluginFactory<out LogManager>
/**
* A global root context. Closing [Global] terminates the framework.
@ -20,4 +21,4 @@ private object GlobalContext : Context("GLOBAL".asName(), null, emptySet(), Meta
public val Global: Context get() = GlobalContext
public fun Context(name: String? = null, block: ContextBuilder.() -> Unit = {}): Context =
Global.buildContext(name, block)
Global.buildContext(name?.parseAsName(), block)

View File

@ -4,7 +4,6 @@ import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.misc.Named
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.plus
import kotlin.reflect.KClass
public fun interface Logger {
public fun log(tag: String, body: () -> String)
@ -63,10 +62,9 @@ public class DefaultLogManager : AbstractPlugin(), LogManager {
override val tag: PluginTag get() = Companion.tag
public companion object : PluginFactory<DefaultLogManager> {
override fun invoke(meta: Meta, context: Context): DefaultLogManager = DefaultLogManager()
override fun build(context: Context, meta: Meta): DefaultLogManager = DefaultLogManager()
override val tag: PluginTag = PluginTag(group = PluginTag.DATAFORGE_GROUP, name = "log.default")
override val type: KClass<out DefaultLogManager> = DefaultLogManager::class
}
}
@ -75,7 +73,7 @@ public class DefaultLogManager : AbstractPlugin(), LogManager {
*/
public val Context.logger: LogManager
get() = plugins.find(inherit = true) { it is LogManager } as? LogManager
?: globalLoggerFactory(context = Global).apply { attach(Global) }
?: getGlobalLoggerFactory().build(context = Global, meta = Meta.EMPTY).apply { attach(Global) }
/**
* The named proxy logger for a context member

View File

@ -3,9 +3,10 @@ package space.kscience.dataforge.context
import space.kscience.dataforge.context.Plugin.Companion.TARGET
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.MetaRepr
import space.kscience.dataforge.misc.DfId
import space.kscience.dataforge.misc.Named
import space.kscience.dataforge.misc.Type
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.parseAsName
import space.kscience.dataforge.provider.Provider
/**
@ -17,7 +18,7 @@ import space.kscience.dataforge.provider.Provider
*
* create - configure - attach - detach - destroy
*/
@Type(TARGET)
@DfId(TARGET)
public interface Plugin : Named, ContextAware, Provider, MetaRepr {
/**
@ -30,7 +31,7 @@ public interface Plugin : Named, ContextAware, Provider, MetaRepr {
/**
* The name of this plugin ignoring version and group
*/
override val name: Name get() = Name.parse(tag.name)
override val name: Name get() = tag.name.parseAsName()
/**
* Plugin dependencies which are required to attach this plugin. Plugin
@ -52,6 +53,8 @@ public interface Plugin : Named, ContextAware, Provider, MetaRepr {
*/
public fun detach()
public val isAttached: Boolean
override fun toMeta(): Meta = Meta {
"context" put context.name.toString()
"type" to this::class.simpleName

View File

@ -0,0 +1,57 @@
package space.kscience.dataforge.context
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.misc.Named
import space.kscience.dataforge.names.Name
/**
* A convenience factory to build simple plugins
*/
public class PluginBuilder(
name: String,
group: String = "",
version: String = "",
) {
public val tag: PluginTag = PluginTag(name, group, version)
private val content = HashMap<String, MutableMap<Name, Any>>()
private val dependencies = HashMap<PluginFactory<*>, Meta>()
public fun requires(
factory: PluginFactory<*>,
meta: Meta = Meta.EMPTY,
) {
dependencies[factory] = meta
}
public fun provides(target: String, items: Map<Name, Any>) {
content.getOrPut(target) { HashMap() }.putAll(items)
}
public fun provides(target: String, vararg items: Named) {
provides(target, items.associateBy { it.name })
}
public fun build(): PluginFactory<*> {
return object : PluginFactory<Plugin> {
override val tag: PluginTag get() = this@PluginBuilder.tag
override fun build(context: Context, meta: Meta): Plugin = object : AbstractPlugin() {
override val tag: PluginTag get() = this@PluginBuilder.tag
override fun content(target: String): Map<Name, Any> = this@PluginBuilder.content[target] ?: emptyMap()
override fun dependsOn(): Map<PluginFactory<*>, Meta> = this@PluginBuilder.dependencies
}
}
}
}
public fun PluginFactory(
name: String,
group: String = "",
version: String = "",
block: PluginBuilder.() -> Unit,
): PluginFactory<*> = PluginBuilder(name, group, version).apply(block).build()
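A usage sketch of the `PluginFactory` builder function added above; the plugin name, group, and provided content are illustrative only.

```kotlin
import space.kscience.dataforge.context.PluginFactory
import space.kscience.dataforge.names.asName

// Declares a simple plugin factory without writing a Plugin class by hand.
val simplePluginFactory: PluginFactory<*> = PluginFactory(name = "simple", group = "com.example") {
    // requires(OtherPlugin)  // optional: depend on another factory (see the AbstractPlugin sketch above)
    provides("service", mapOf("worker".asName() to Any()))  // content exposed under the "service" target
}
```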

View File

@ -1,13 +1,11 @@
package space.kscience.dataforge.context
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.misc.Type
import kotlin.reflect.KClass
import space.kscience.dataforge.misc.DfId
@Type(PluginFactory.TYPE)
@DfId(PluginFactory.TYPE)
public interface PluginFactory<T : Plugin> : Factory<T> {
public val tag: PluginTag
public val type: KClass<out T>
public companion object {
public const val TYPE: String = "pluginFactory"
@ -18,7 +16,6 @@ public interface PluginFactory<T : Plugin> : Factory<T> {
* Plugin factory created for the specific actual plugin
*/
internal class DeFactoPluginFactory<T : Plugin>(val plugin: T) : PluginFactory<T> {
override fun invoke(meta: Meta, context: Context): T = plugin
override fun build(context: Context, meta: Meta): T = plugin
override val tag: PluginTag get() = plugin.tag
override val type: KClass<out T> get() = plugin::class
}


@ -1,7 +1,10 @@
package space.kscience.dataforge.context
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.misc.DFInternal
import space.kscience.dataforge.names.plus
import kotlin.reflect.KClass
import kotlin.reflect.cast
/**
@ -12,7 +15,7 @@ import kotlin.reflect.KClass
*/
public class PluginManager internal constructor(
override val context: Context,
private val plugins: Set<Plugin>
private val plugins: Set<Plugin>,
) : ContextAware, Iterable<Plugin> {
init {
@ -63,15 +66,17 @@ public class PluginManager internal constructor(
* @param <T>
* @return
*/
@Suppress("UNCHECKED_CAST")
public operator fun <T : Any> get(type: KClass<out T>, tag: PluginTag? = null, recursive: Boolean = true): T? =
find(recursive) { type.isInstance(it) && (tag == null || tag.matches(it.tag)) } as T?
@DFInternal
public fun <T : Any> getByType(type: KClass<T>, tag: PluginTag? = null, inherit: Boolean = true): T? =
find(inherit) { type.isInstance(it) && (tag == null || tag.matches(it.tag)) }?.let { type.cast(it) }
@OptIn(DFInternal::class)
public inline operator fun <reified T : Any> get(tag: PluginTag? = null, recursive: Boolean = true): T? =
get(T::class, tag, recursive)
getByType(T::class, tag, recursive)
@OptIn(DFInternal::class)
public inline operator fun <reified T : Plugin> get(factory: PluginFactory<T>, recursive: Boolean = true): T? =
get(factory.type, factory.tag, recursive)
getByType(T::class, factory.tag, recursive)
override fun iterator(): Iterator<Plugin> = plugins.iterator()
}
@ -80,12 +85,16 @@ public class PluginManager internal constructor(
* Fetch a plugin with given meta from the context. If the plugin (with given meta) is already registered, it is returned.
* Otherwise, new child context with the plugin is created. In the later case the context could be retrieved from the plugin.
*/
public inline fun <reified T : Plugin> Context.fetch(factory: PluginFactory<T>, meta: Meta = Meta.EMPTY): T {
public inline fun <reified T : Plugin> Context.request(factory: PluginFactory<T>, meta: Meta = Meta.EMPTY): T {
val existing = plugins[factory]
return if (existing != null && existing.meta == meta) existing
else {
buildContext {
buildContext(name = this@request.name + factory.tag.name) {
plugin(factory, meta)
}.plugins[factory]!!
}
}
}
@Deprecated("Replace with request", ReplaceWith("request(factory, meta)"))
public inline fun <reified T : Plugin> Context.fetch(factory: PluginFactory<T>, meta: Meta = Meta.EMPTY): T =
request(factory, meta)
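A sketch of the renamed `request` extension in use. `DemoPlugin` and its companion factory are hypothetical and assume the reduced `PluginFactory` shape used elsewhere in this change (only `build` and `tag`):

```kotlin
import space.kscience.dataforge.context.*
import space.kscience.dataforge.meta.Meta

class DemoPlugin : AbstractPlugin() {
    override val tag: PluginTag get() = Companion.tag

    companion object : PluginFactory<DemoPlugin> {
        override fun build(context: Context, meta: Meta): DemoPlugin = DemoPlugin()
        override val tag: PluginTag = PluginTag(name = "demo", group = PluginTag.DATAFORGE_GROUP)
    }
}

fun requestDemo(): DemoPlugin {
    val context = Context("demo") {}
    // Returns the already attached plugin, or builds a child context that carries it.
    return context.request(DemoPlugin)
}
```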


@ -1,5 +1,6 @@
package space.kscience.dataforge.context
import kotlinx.serialization.Serializable
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.MetaRepr
@ -9,6 +10,7 @@ import space.kscience.dataforge.meta.MetaRepr
*
* @author Alexander Nozik
*/
@Serializable
public data class PluginTag(
val name: String,
val group: String = "",


@ -1,7 +1,8 @@
package space.kscience.dataforge.properties
import space.kscience.dataforge.meta.*
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.ObservableMutableMeta
import space.kscience.dataforge.meta.transformations.MetaConverter
import space.kscience.dataforge.meta.transformations.nullableMetaToObject
import space.kscience.dataforge.meta.transformations.nullableObjectToMeta
@ -24,7 +25,7 @@ public class MetaProperty<T : Any>(
override fun onChange(owner: Any?, callback: (T?) -> Unit) {
meta.onChange(owner) { name ->
if (name.startsWith(this@MetaProperty.name)) callback(converter.nullableMetaToObject(get(name)))
if (name.startsWith(this@MetaProperty.name)) callback(converter.nullableMetaToObject(this[name]))
}
}


@ -3,7 +3,7 @@ package space.kscience.dataforge.properties
import space.kscience.dataforge.meta.Scheme
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.parseAsName
import space.kscience.dataforge.names.startsWith
import kotlin.reflect.KMutableProperty1
@ -18,7 +18,7 @@ public fun <S : Scheme, T : Any> S.property(property: KMutableProperty1<S, T?>):
override fun onChange(owner: Any?, callback: (T?) -> Unit) {
this@property.meta.onChange(this) { name ->
if (name.startsWith(Name.parse(property.name))) {
if (name.startsWith(property.name.parseAsName(true))) {
callback(property.get(this@property))
}
}


@ -16,6 +16,7 @@
package space.kscience.dataforge.provider
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.parseAsName
import kotlin.jvm.JvmInline
/**
@ -60,9 +61,10 @@ public data class PathToken(val name: Name, val target: String? = null) {
public companion object {
public const val TARGET_SEPARATOR: String = "::"
public fun parse(token: String): PathToken {
public fun parse(token: String, cache: Boolean = false): PathToken {
val target = token.substringBefore(TARGET_SEPARATOR, "")
val name = Name.parse(token.substringAfter(TARGET_SEPARATOR))
val name = token.substringAfter(TARGET_SEPARATOR).parseAsName(cache)
if (target.contains("[")) TODO("target separators in queries are not supported")
return PathToken(name, target)
}
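For example (a short sketch; the path string is arbitrary), the new `cache` flag simply forwards to `parseAsName`:

```kotlin
import space.kscience.dataforge.provider.PathToken

// "workspace" becomes the target, "data.x" is parsed (and optionally cached) as the name.
val token: PathToken = PathToken.parse("workspace::data.x", cache = true)
```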


@ -75,10 +75,8 @@ public inline fun <reified T : Any> Provider.provide(path: String, targetOverrid
/**
* Typed top level content
*/
public fun <T : Any> Provider.top(target: String, type: KClass<out T>): Map<Name, T> {
return content(target).mapValues {
type.safeCast(it.value) ?: error("The type of element $it is ${it::class} but $type is expected")
}
public fun <T : Any> Provider.top(target: String, type: KClass<out T>): Map<Name, T> = content(target).mapValues {
type.safeCast(it.value) ?: error("The type of element ${it.value} is ${it.value::class} but $type is expected")
}
/**


@ -1,7 +1,7 @@
package space.kscience.dataforge.context
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.appendLeft
import space.kscience.dataforge.names.appendFirst
import kotlin.test.Test
import kotlin.test.assertEquals
@ -20,14 +20,13 @@ class ContextTest {
@Test
fun testPluginManager() {
val context = Global.buildContext {
name("test")
val context = Context("test") {
plugin(DummyPlugin())
}
val members = context.gather<Name>("test")
assertEquals(3, members.count())
members.forEach {
assertEquals(it.key, it.value.appendLeft("test"))
assertEquals(it.key, it.value.appendFirst("test"))
}
}


@ -2,13 +2,14 @@ package space.kscience.dataforge.context
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.names.Name
import kotlin.reflect.KClass
public class ConsoleLogManager : AbstractPlugin(), LogManager {
override fun logger(name: Name): Logger = Logger { tag, body ->
val message: String = body.safe
when (tag) {
// TODO depends on https://youtrack.jetbrains.com/issue/KT-33595/
LogManager.DEBUG -> console.asDynamic().debug("[${context.name}] $name: $message")
LogManager.INFO -> console.info("[${context.name}] $name: $message")
LogManager.WARNING -> console.warn("[${context.name}] $name: $message")
LogManager.ERROR -> console.error("[${context.name}] $name: $message")
@ -22,11 +23,10 @@ public class ConsoleLogManager : AbstractPlugin(), LogManager {
override val tag: PluginTag get() = Companion.tag
public companion object : PluginFactory<ConsoleLogManager> {
override fun invoke(meta: Meta, context: Context): ConsoleLogManager = ConsoleLogManager()
override fun build(context: Context, meta: Meta): ConsoleLogManager = ConsoleLogManager()
override val tag: PluginTag = PluginTag(group = PluginTag.DATAFORGE_GROUP, name = "log.jsConsole")
override val type: KClass<out ConsoleLogManager> = ConsoleLogManager::class
}
}
internal actual val globalLoggerFactory: PluginFactory<out LogManager> = ConsoleLogManager
internal actual fun getGlobalLoggerFactory(): PluginFactory<out LogManager> = ConsoleLogManager


@ -3,7 +3,6 @@ package space.kscience.dataforge.context
import org.slf4j.LoggerFactory
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.names.Name
import kotlin.reflect.KClass
public class SlfLogManager : AbstractPlugin(), LogManager {
@ -24,11 +23,10 @@ public class SlfLogManager : AbstractPlugin(), LogManager {
override val tag: PluginTag get() = Companion.tag
public companion object : PluginFactory<SlfLogManager> {
override fun invoke(meta: Meta, context: Context): SlfLogManager = SlfLogManager()
override fun build(context: Context, meta: Meta): SlfLogManager = SlfLogManager()
override val tag: PluginTag = PluginTag(group = PluginTag.DATAFORGE_GROUP, name = "log.kotlinLogging")
override val type: KClass<out SlfLogManager> = SlfLogManager::class
}
}
internal actual val globalLoggerFactory: PluginFactory<out LogManager> = SlfLogManager
internal actual fun getGlobalLoggerFactory(): PluginFactory<out LogManager> = SlfLogManager


@ -1,30 +1,32 @@
package space.kscience.dataforge.provider
import space.kscience.dataforge.context.Context
import space.kscience.dataforge.context.PluginBuilder
import space.kscience.dataforge.context.gather
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.misc.Type
import space.kscience.dataforge.misc.DfId
import space.kscience.dataforge.misc.Named
import space.kscience.dataforge.names.Name
import kotlin.reflect.KClass
import kotlin.reflect.full.findAnnotation
@DFExperimental
public val KClass<*>.dfType: String
get() = findAnnotation<Type>()?.id ?: simpleName ?: ""
public val KClass<*>.dfId: String
get() = findAnnotation<DfId>()?.id ?: simpleName ?: ""
/**
* Provide an object with given name inferring target from its type using [Type] annotation
* Provide an object with given name inferring target from its type using [DfId] annotation
*/
@DFExperimental
public inline fun <reified T : Any> Provider.provideByType(name: String): T? {
val target = T::class.dfType
val target = T::class.dfId
return provide(target, name)
}
@DFExperimental
public inline fun <reified T : Any> Provider.top(): Map<Name, T> {
val target = T::class.dfType
val target = T::class.dfId
return top(target)
}
@ -33,5 +35,15 @@ public inline fun <reified T : Any> Provider.top(): Map<Name, T> {
*/
@DFExperimental
public inline fun <reified T : Any> Context.gather(inherit: Boolean = true): Map<Name, T> =
gather<T>(T::class.dfType, inherit)
gather<T>(T::class.dfId, inherit)
@DFExperimental
public inline fun <reified T : Any> PluginBuilder.provides(items: Map<Name, T>) {
provides(T::class.dfId, items)
}
@DFExperimental
public inline fun <reified T : Any> PluginBuilder.provides(vararg items: Named) {
provides(T::class.dfId, *items)
}
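A sketch of the new `DfId`-driven `provides` helpers. The `Device` type and its item are hypothetical; the target falls back to the simple class name when no `@DfId` annotation is present:

```kotlin
import space.kscience.dataforge.context.PluginFactory
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.misc.DfId
import space.kscience.dataforge.misc.Named
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.asName
import space.kscience.dataforge.provider.provides

@DfId("device")
class Device(override val name: Name) : Named

@OptIn(DFExperimental::class)
val deviceFactory: PluginFactory<*> = PluginFactory("devices") {
    // Equivalent to provides("device", Device("gauge".asName()))
    provides<Device>(Device("gauge".asName()))
}
```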


@ -1,4 +1,4 @@
package space.kscience.dataforge.context
internal actual val globalLoggerFactory: PluginFactory<out LogManager> = DefaultLogManager
internal actual fun getGlobalLoggerFactory(): PluginFactory<out LogManager> = DefaultLogManager

dataforge-data/README.md (new file, 23 lines)

@ -0,0 +1,23 @@
# Module dataforge-data
## Usage
## Artifact:
The Maven coordinates of this project are `space.kscience:dataforge-data:0.7.0`.
**Gradle Kotlin DSL:**
```kotlin
repositories {
maven("https://repo.kotlin.link")
//uncomment to access development builds
//maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
mavenCentral()
}
dependencies {
implementation("space.kscience:dataforge-data:0.7.0")
}
```


@ -1,23 +1,18 @@
plugins {
id("ru.mipt.npm.gradle.mpp")
id("ru.mipt.npm.gradle.native")
id("space.kscience.gradle.mpp")
}
kscience{
jvm()
js()
native()
useCoroutines()
}
kotlin {
sourceSets {
commonMain{
dependencies {
api(project(":dataforge-meta"))
api(kotlin("reflect"))
}
}
dependencies {
api(project(":dataforge-meta"))
api(kotlin("reflect"))
}
}
readme{
maturity = ru.mipt.npm.gradle.Maturity.EXPERIMENTAL
maturity = space.kscience.gradle.Maturity.EXPERIMENTAL
}


@ -0,0 +1,65 @@
package space.kscience.dataforge.actions
import kotlinx.coroutines.launch
import space.kscience.dataforge.data.*
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.misc.DFInternal
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.startsWith
import kotlin.reflect.KType
/**
* Remove all values with keys starting with [name]
*/
internal fun MutableMap<Name, *>.removeWhatStartsWith(name: Name) {
val toRemove = keys.filter { it.startsWith(name) }
toRemove.forEach(::remove)
}
/**
* An action that caches results on-demand and recalculates them on source push
*/
public abstract class AbstractAction<in T : Any, R : Any>(
public val outputType: KType,
) : Action<T, R> {
/**
* Generate initial content of the output
*/
protected abstract fun DataSetBuilder<R>.generate(
data: DataSet<T>,
meta: Meta,
)
/**
* Update part of the data set when given [updateKey] is triggered by the source
*/
protected open fun DataSourceBuilder<R>.update(
dataSet: DataSet<T>,
meta: Meta,
updateKey: Name,
) {
// By default, recalculate the whole dataset
generate(dataSet, meta)
}
@OptIn(DFInternal::class)
override fun execute(
dataSet: DataSet<T>,
meta: Meta,
): DataSet<R> = if (dataSet is DataSource) {
DataSource(outputType, dataSet){
generate(dataSet, meta)
launch {
dataSet.updates.collect { name ->
update(dataSet, meta, name)
}
}
}
} else {
DataTree<R>(outputType) {
generate(dataSet, meta)
}
}
}
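A minimal sketch of a custom action built on the new base class (the `PrefixAction` name and its renaming rule are illustrative only):

```kotlin
import space.kscience.dataforge.actions.AbstractAction
import space.kscience.dataforge.data.DataSet
import space.kscience.dataforge.data.DataSetBuilder
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.plus
import kotlin.reflect.KType

// Copies every item into a "prefixed" branch; updates fall back to the default full regeneration.
class PrefixAction<T : Any>(type: KType) : AbstractAction<T, T>(type) {
    override fun DataSetBuilder<T>.generate(source: DataSet<T>, meta: Meta) {
        for (item in source) {
            data(Name.parse("prefixed") + item.name, item.data)
        }
    }
}
```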


@ -1,6 +1,5 @@
package space.kscience.dataforge.actions
import kotlinx.coroutines.CoroutineScope
import space.kscience.dataforge.data.DataSet
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.misc.DFExperimental
@ -9,13 +8,12 @@ import space.kscience.dataforge.misc.DFExperimental
* A simple data transformation on a data node. Actions should avoid doing actual dependency evaluation in [execute].
*/
public interface Action<in T : Any, out R : Any> {
/**
* Transform the data in the node, producing a new node. By default it is assumed that all calculations are lazy
* Transform the data in the node, producing a new node. By default, it is assumed that all calculations are lazy
* so not actual computation is started at this moment.
*
* [scope] context used to compute the initial result, also it is used for updates propagation
*/
public suspend fun execute(dataSet: DataSet<T>, meta: Meta = Meta.EMPTY, scope: CoroutineScope? = null): DataSet<R>
public fun execute(dataSet: DataSet<T>, meta: Meta = Meta.EMPTY): DataSet<R>
public companion object
}
@ -26,16 +24,17 @@ public interface Action<in T : Any, out R : Any> {
public infix fun <T : Any, I : Any, R : Any> Action<T, I>.then(action: Action<I, R>): Action<T, R> {
// TODO introduce composite action and add optimize by adding action to the list
return object : Action<T, R> {
override suspend fun execute(dataSet: DataSet<T>, meta: Meta, scope: CoroutineScope?): DataSet<R> {
return action.execute(this@then.execute(dataSet, meta, scope), meta, scope)
}
override fun execute(
dataSet: DataSet<T>,
meta: Meta,
): DataSet<R> = action.execute(this@then.execute(dataSet, meta), meta)
}
}
@DFExperimental
public suspend fun <T : Any, R : Any> DataSet<T>.transformWith(
action: Action<T, R>,
public operator fun <T : Any, R : Any> Action<T, R>.invoke(
dataSet: DataSet<T>,
meta: Meta = Meta.EMPTY,
scope: CoroutineScope? = null,
): DataSet<R> = action.execute(this, meta, scope)
): DataSet<R> = execute(dataSet, meta)
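With `execute` no longer suspending and the new `invoke` operator, composed actions can be applied directly. A small sketch:

```kotlin
import space.kscience.dataforge.actions.Action
import space.kscience.dataforge.actions.invoke
import space.kscience.dataforge.actions.then
import space.kscience.dataforge.data.DataSet
import space.kscience.dataforge.misc.DFExperimental

// Chain two actions and apply the composite to a data set in one call.
@OptIn(DFExperimental::class)
fun <T : Any> applyBoth(first: Action<T, T>, second: Action<T, T>, input: DataSet<T>): DataSet<T> =
    (first then second)(input)
```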


@ -1,9 +1,5 @@
package space.kscience.dataforge.actions
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.flow.collect
import kotlinx.coroutines.flow.map
import kotlinx.coroutines.launch
import space.kscience.dataforge.data.*
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.MutableMeta
@ -29,66 +25,71 @@ public data class ActionEnv(
* Action environment
*/
@DFBuilder
public class MapActionBuilder<T, R>(public var name: Name, public var meta: MutableMeta, public val actionMeta: Meta) {
public class MapActionBuilder<T, R>(
public var name: Name,
public var meta: MutableMeta,
public val actionMeta: Meta,
@PublishedApi internal var outputType: KType,
) {
public lateinit var result: suspend ActionEnv.(T) -> R
/**
* Set unsafe [outputType] for the resulting data. Be sure that it is correct.
*/
public fun <R1 : R> result(outputType: KType, f: suspend ActionEnv.(T) -> R1) {
this.outputType = outputType
result = f;
}
/**
* Calculate the result of goal
*/
public fun result(f: suspend ActionEnv.(T) -> R) {
public inline fun <reified R1 : R> result(noinline f: suspend ActionEnv.(T) -> R1) {
outputType = typeOf<R1>()
result = f;
}
}
@PublishedApi
internal class MapAction<in T : Any, out R : Any>(
private val outputType: KType,
internal class MapAction<in T : Any, R : Any>(
outputType: KType,
private val block: MapActionBuilder<T, R>.() -> Unit,
) : Action<T, R> {
) : AbstractAction<T, R>(outputType) {
override suspend fun execute(
dataSet: DataSet<T>,
meta: Meta,
scope: CoroutineScope?,
): DataSet<R> {
suspend fun mapOne(data: NamedData<T>): NamedData<R> {
// Creating a new environment for action using **old** name, old meta and task meta
val env = ActionEnv(data.name, data.meta, meta)
private fun DataSetBuilder<R>.mapOne(name: Name, data: Data<T>, meta: Meta) {
// Creating a new environment for action using **old** name, old meta and task meta
val env = ActionEnv(name, data.meta, meta)
//applying transformation from builder
val builder = MapActionBuilder<T, R>(
data.name,
data.meta.toMutableMeta(), // using data meta
meta
).apply(block)
//applying transformation from builder
val builder = MapActionBuilder<T, R>(
name,
data.meta.toMutableMeta(), // using data meta
meta,
outputType
).apply(block)
//getting new name
val newName = builder.name
//getting new name
val newName = builder.name
//getting new meta
val newMeta = builder.meta.seal()
//getting new meta
val newMeta = builder.meta.seal()
@OptIn(DFInternal::class)
val newData = Data(outputType, newMeta, dependencies = listOf(data)) {
builder.result(env, data.await())
}
//setting the data node
return newData.named(newName)
@OptIn(DFInternal::class)
val newData = Data(builder.outputType, newMeta, dependencies = listOf(data)) {
builder.result(env, data.await())
}
//setting the data node
data(newName, newData)
}
val flow = dataSet.flow().map(::mapOne)
override fun DataSetBuilder<R>.generate(data: DataSet<T>, meta: Meta) {
data.forEach { mapOne(it.name, it.data, meta) }
}
return ActiveDataTree(outputType) {
populate(flow)
scope?.launch {
dataSet.updates.collect { name ->
//clear old nodes
remove(name)
//collect new items
populate(dataSet.flowChildren(name).map(::mapOne))
}
}
}
override fun DataSourceBuilder<R>.update(dataSet: DataSet<T>, meta: Meta, updateKey: Name) {
remove(updateKey)
dataSet[updateKey]?.let { mapOne(updateKey, it, meta) }
}
}


@ -1,9 +1,5 @@
package space.kscience.dataforge.actions
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.flow
import kotlinx.coroutines.flow.fold
import space.kscience.dataforge.data.*
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.MutableMeta
@ -11,17 +7,28 @@ import space.kscience.dataforge.misc.DFBuilder
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.misc.DFInternal
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.parseAsName
import kotlin.reflect.KType
import kotlin.reflect.typeOf
public class JoinGroup<T : Any, R : Any>(public var name: String, internal val set: DataSet<T>) {
public class JoinGroup<T : Any, R : Any>(
public var name: String,
internal val set: DataSet<T>,
@PublishedApi internal var outputType: KType,
) {
public var meta: MutableMeta = MutableMeta()
public lateinit var result: suspend ActionEnv.(Map<Name, T>) -> R
public lateinit var result: suspend ActionEnv.(Map<Name, ValueWithMeta<T>>) -> R
public fun result(f: suspend ActionEnv.(Map<Name, T>) -> R) {
internal fun <R1 : R> result(outputType: KType, f: suspend ActionEnv.(Map<Name, ValueWithMeta<T>>) -> R1) {
this.outputType = outputType
this.result = f;
}
public inline fun <reified R1 : R> result(noinline f: suspend ActionEnv.(Map<Name, ValueWithMeta<T>>) -> R1) {
outputType = typeOf<R1>()
this.result = f;
}
@ -29,31 +36,30 @@ public class JoinGroup<T : Any, R : Any>(public var name: String, internal val s
@DFBuilder
public class ReduceGroupBuilder<T : Any, R : Any>(
private val inputType: KType,
private val scope: CoroutineScope,
public val actionMeta: Meta,
private val outputType: KType,
) {
private val groupRules: MutableList<suspend (DataSet<T>) -> List<JoinGroup<T, R>>> = ArrayList();
private val groupRules: MutableList<(DataSet<T>) -> List<JoinGroup<T, R>>> = ArrayList();
/**
* introduce grouping by meta value
*/
public fun byValue(tag: String, defaultTag: String = "@default", action: JoinGroup<T, R>.() -> Unit) {
groupRules += { node ->
GroupRule.byMetaValue(scope, tag, defaultTag).gather(node).map {
JoinGroup<T, R>(it.key, it.value).apply(action)
GroupRule.byMetaValue(tag, defaultTag).gather(node).map {
JoinGroup<T, R>(it.key, it.value, outputType).apply(action)
}
}
}
public fun group(
groupName: String,
filter: suspend (Name, Data<T>) -> Boolean,
predicate: (Name, Meta) -> Boolean,
action: JoinGroup<T, R>.() -> Unit,
) {
groupRules += { source ->
listOf(
JoinGroup<T, R>(groupName, source.filter(filter)).apply(action)
JoinGroup<T, R>(groupName, source.filter(predicate), outputType).apply(action)
)
}
}
@ -61,30 +67,27 @@ public class ReduceGroupBuilder<T : Any, R : Any>(
/**
* Apply transformation to the whole node
*/
public fun result(resultName: String, f: suspend ActionEnv.(Map<Name, T>) -> R) {
public fun result(resultName: String, f: suspend ActionEnv.(Map<Name, ValueWithMeta<T>>) -> R) {
groupRules += { node ->
listOf(JoinGroup<T, R>(resultName, node).apply { result(f) })
listOf(JoinGroup<T, R>(resultName, node, outputType).apply { result(outputType, f) })
}
}
internal suspend fun buildGroups(input: DataSet<T>): List<JoinGroup<T, R>> {
return groupRules.flatMap { it.invoke(input) }
}
internal fun buildGroups(input: DataSet<T>): List<JoinGroup<T, R>> =
groupRules.flatMap { it.invoke(input) }
}
@PublishedApi
internal class ReduceAction<T : Any, R : Any>(
private val inputType: KType,
outputType: KType,
private val action: ReduceGroupBuilder<T, R>.() -> Unit,
) : CachingAction<T, R>(outputType) {
//TODO optimize reduction. Currently the whole action recalculates on push
) : AbstractAction<T, R>(outputType) {
//TODO optimize reduction. Currently, the whole action recalculates on push
override fun CoroutineScope.transform(set: DataSet<T>, meta: Meta, key: Name): Flow<NamedData<R>> = flow {
ReduceGroupBuilder<T, R>(inputType, this@transform, meta).apply(action).buildGroups(set).forEach { group ->
val dataFlow: Map<Name, Data<T>> = group.set.flow().fold(HashMap()) { acc, value ->
override fun DataSetBuilder<R>.generate(data: DataSet<T>, meta: Meta) {
ReduceGroupBuilder<T, R>(meta, outputType).apply(action).buildGroups(data).forEach { group ->
val dataFlow: Map<Name, Data<T>> = group.set.asSequence().fold(HashMap()) { acc, value ->
acc.apply {
acc[value.name] = value.data
}
@ -94,13 +97,13 @@ internal class ReduceAction<T : Any, R : Any>(
val groupMeta = group.meta
val env = ActionEnv(Name.parse(groupName), groupMeta, meta)
val env = ActionEnv(groupName.parseAsName(), groupMeta, meta)
@OptIn(DFInternal::class) val res: Data<R> = dataFlow.reduceToData(
outputType,
group.outputType,
meta = groupMeta
) { group.result.invoke(env, it) }
emit(res.named(env.name))
data(env.name, res)
}
}
}
@ -109,7 +112,6 @@ internal class ReduceAction<T : Any, R : Any>(
* A one-to-one mapping action
*/
@DFExperimental
@Suppress("FunctionName")
public inline fun <reified T : Any, reified R : Any> Action.Companion.reduce(
noinline builder: ReduceGroupBuilder<T, R>.() -> Unit,
): Action<T, R> = ReduceAction(typeOf<T>(), typeOf<R>(), builder)
): Action<T, R> = ReduceAction(typeOf<R>(), builder)
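A sketch of the reduce builder after the rework; the data types and the result name are placeholders:

```kotlin
import space.kscience.dataforge.actions.Action
import space.kscience.dataforge.actions.reduce
import space.kscience.dataforge.misc.DFExperimental

// Collapse the whole input node into a single datum named "count".
@OptIn(DFExperimental::class)
val countAll: Action<Int, Int> = Action.reduce<Int, Int> {
    result("count") { values -> values.size }
}
```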


@ -1,17 +1,13 @@
package space.kscience.dataforge.actions
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.FlowPreview
import kotlinx.coroutines.flow.*
import kotlinx.coroutines.launch
import space.kscience.dataforge.data.*
import space.kscience.dataforge.meta.Laminate
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.MutableMeta
import space.kscience.dataforge.meta.toMutableMeta
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.misc.DFInternal
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.parseAsName
import kotlin.collections.set
import kotlin.reflect.KType
import kotlin.reflect.typeOf
@ -19,10 +15,15 @@ import kotlin.reflect.typeOf
public class SplitBuilder<T : Any, R : Any>(public val name: Name, public val meta: Meta) {
public class FragmentRule<T : Any, R : Any>(public val name: Name, public var meta: MutableMeta) {
public class FragmentRule<T : Any, R : Any>(
public val name: Name,
public var meta: MutableMeta,
@PublishedApi internal var outputType: KType,
) {
public lateinit var result: suspend (T) -> R
public fun result(f: suspend (T) -> R) {
public inline fun <reified R1 : R> result(noinline f: suspend (T) -> R1) {
this.outputType = typeOf<R1>()
result = f;
}
}
@ -35,7 +36,7 @@ public class SplitBuilder<T : Any, R : Any>(public val name: Name, public val me
* @param rule the rule to transform fragment name and meta using
*/
public fun fragment(name: String, rule: FragmentRule<T, R>.() -> Unit) {
fragments[Name.parse(name)] = rule
fragments[name.parseAsName()] = rule
}
}
@ -44,52 +45,48 @@ public class SplitBuilder<T : Any, R : Any>(public val name: Name, public val me
*/
@PublishedApi
internal class SplitAction<T : Any, R : Any>(
private val outputType: KType,
outputType: KType,
private val action: SplitBuilder<T, R>.() -> Unit,
) : Action<T, R> {
) : AbstractAction<T, R>(outputType) {
@OptIn(FlowPreview::class)
override suspend fun execute(
dataSet: DataSet<T>,
meta: Meta,
scope: CoroutineScope?,
): DataSet<R> {
private fun DataSetBuilder<R>.splitOne(name: Name, data: Data<T>, meta: Meta) {
val laminate = Laminate(data.meta, meta)
suspend fun splitOne(data: NamedData<T>): Flow<NamedData<R>> {
val laminate = Laminate(data.meta, meta)
val split = SplitBuilder<T, R>(data.name, data.meta).apply(action)
val split = SplitBuilder<T, R>(name, data.meta).apply(action)
// apply individual fragment rules to result
return split.fragments.entries.asFlow().map { (fragmentName, rule) ->
val env = SplitBuilder.FragmentRule<T, R>(fragmentName, laminate.toMutableMeta()).apply(rule)
//data.map<R>(outputType, meta = env.meta) { env.result(it) }.named(fragmentName)
@OptIn(DFInternal::class) Data(outputType, meta = env.meta, dependencies = listOf(data)) {
// apply individual fragment rules to result
split.fragments.forEach { (fragmentName, rule) ->
val env = SplitBuilder.FragmentRule<T, R>(
fragmentName,
laminate.toMutableMeta(),
outputType
).apply(rule)
//data.map<R>(outputType, meta = env.meta) { env.result(it) }.named(fragmentName)
data(
fragmentName,
@Suppress("OPT_IN_USAGE") Data(outputType, meta = env.meta, dependencies = listOf(data)) {
env.result(data.await())
}.named(fragmentName)
}
}
return ActiveDataTree<R>(outputType) {
populate(dataSet.flow().flatMapConcat(transform = ::splitOne))
scope?.launch {
dataSet.updates.collect { name ->
//clear old nodes
remove(name)
//collect new items
populate(dataSet.flowChildren(name).flatMapConcat(transform = ::splitOne))
}
}
)
}
}
override fun DataSetBuilder<R>.generate(data: DataSet<T>, meta: Meta) {
data.forEach { splitOne(it.name, it.data, meta) }
}
override fun DataSourceBuilder<R>.update(dataSet: DataSet<T>, meta: Meta, updateKey: Name) {
remove(updateKey)
dataSet[updateKey]?.let { splitOne(updateKey, it, meta) }
}
}
/**
* Action that splits each incoming element into a number of fragments defined in builder
*/
@DFExperimental
@Suppress("FunctionName")
public inline fun <T : Any, reified R : Any> Action.Companion.split(
noinline builder: SplitBuilder<T, R>.() -> Unit,
): Action<T, R> = SplitAction(typeOf<R>(), builder)
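A sketch of a split action producing two fragments per input item (fragment names and the arithmetic are arbitrary):

```kotlin
import space.kscience.dataforge.actions.Action
import space.kscience.dataforge.actions.split
import space.kscience.dataforge.misc.DFExperimental

// Each incoming Int produces a "doubled" and a "negated" fragment.
@OptIn(DFExperimental::class)
val expand: Action<Int, Int> = Action.split<Int, Int> {
    fragment("doubled") { result { it * 2 } }
    fragment("negated") { result { -it } }
}
```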


@ -1,118 +0,0 @@
package space.kscience.dataforge.data
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Job
import kotlinx.coroutines.flow.*
import kotlinx.coroutines.launch
import kotlinx.coroutines.sync.Mutex
import kotlinx.coroutines.sync.withLock
import space.kscience.dataforge.meta.*
import space.kscience.dataforge.names.*
import kotlin.reflect.KType
import kotlin.reflect.typeOf
/**
* A mutable [DataTree.Companion.active]. It
*/
public class ActiveDataTree<T : Any>(
override val dataType: KType,
) : DataTree<T>, DataSetBuilder<T>, ActiveDataSet<T> {
private val mutex = Mutex()
private val treeItems = HashMap<NameToken, DataTreeItem<T>>()
override suspend fun items(): Map<NameToken, DataTreeItem<T>> = mutex.withLock {
treeItems.filter { !it.key.body.startsWith("@") }
}
private val _updates = MutableSharedFlow<Name>()
override val updates: Flow<Name>
get() = _updates
private suspend fun remove(token: NameToken) {
mutex.withLock {
if (treeItems.remove(token) != null) {
_updates.emit(token.asName())
}
}
}
override suspend fun remove(name: Name) {
if (name.isEmpty()) error("Can't remove the root node")
(getItem(name.cutLast()).tree as? ActiveDataTree)?.remove(name.lastOrNull()!!)
}
private suspend fun set(token: NameToken, data: Data<T>) {
mutex.withLock {
treeItems[token] = DataTreeItem.Leaf(data)
}
}
private suspend fun getOrCreateNode(token: NameToken): ActiveDataTree<T> =
(treeItems[token] as? DataTreeItem.Node<T>)?.tree as? ActiveDataTree<T>
?: ActiveDataTree<T>(dataType).also {
mutex.withLock {
treeItems[token] = DataTreeItem.Node(it)
}
}
private suspend fun getOrCreateNode(name: Name): ActiveDataTree<T> {
return when (name.length) {
0 -> this
1 -> getOrCreateNode(name.firstOrNull()!!)
else -> getOrCreateNode(name.firstOrNull()!!).getOrCreateNode(name.cutFirst())
}
}
override suspend fun emit(name: Name, data: Data<T>?) {
if (data == null) {
remove(name)
} else {
when (name.length) {
0 -> error("Can't add data with empty name")
1 -> set(name.firstOrNull()!!, data)
2 -> getOrCreateNode(name.cutLast()).set(name.lastOrNull()!!, data)
}
}
_updates.emit(name)
}
/**
* Copy given data set and mirror its changes to this [ActiveDataTree] in [this@setAndObserve]. Returns an update [Job]
*/
public fun CoroutineScope.setAndObserve(name: Name, dataSet: DataSet<T>): Job = launch {
emit(name, dataSet)
dataSet.updates.collect { nameInBranch ->
emit(name + nameInBranch, dataSet.getData(nameInBranch))
}
}
}
/**
* Create a dynamic tree. Initial data is placed synchronously. Updates are propagated via [updatesScope]
*/
@Suppress("FunctionName")
public suspend fun <T : Any> ActiveDataTree(
type: KType,
block: suspend ActiveDataTree<T>.() -> Unit,
): ActiveDataTree<T> {
val tree = ActiveDataTree<T>(type)
tree.block()
return tree
}
@Suppress("FunctionName")
public suspend inline fun <reified T : Any> ActiveDataTree(
crossinline block: suspend ActiveDataTree<T>.() -> Unit,
): ActiveDataTree<T> = ActiveDataTree<T>(typeOf<T>()).apply { block() }
public suspend inline fun <reified T : Any> ActiveDataTree<T>.emit(
name: Name,
noinline block: suspend ActiveDataTree<T>.() -> Unit,
): Unit = emit(name, ActiveDataTree(typeOf<T>(), block))
public suspend inline fun <reified T : Any> ActiveDataTree<T>.emit(
name: String,
noinline block: suspend ActiveDataTree<T>.() -> Unit,
): Unit = emit(Name.parse(name), ActiveDataTree(typeOf<T>(), block))


@ -1,52 +0,0 @@
package space.kscience.dataforge.data
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.coroutineScope
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.collect
import space.kscience.dataforge.actions.Action
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.startsWith
import kotlin.reflect.KType
/**
* Remove all values with keys starting with [name]
*/
internal fun MutableMap<Name, *>.removeWhatStartsWith(name: Name) {
val toRemove = keys.filter { it.startsWith(name) }
toRemove.forEach(::remove)
}
/**
* An action that caches results on-demand and recalculates them on source push
*/
public abstract class CachingAction<in T : Any, out R : Any>(
public val outputType: KType,
) : Action<T, R> {
protected abstract fun CoroutineScope.transform(
set: DataSet<T>,
meta: Meta,
key: Name = Name.EMPTY,
): Flow<NamedData<R>>
override suspend fun execute(
dataSet: DataSet<T>,
meta: Meta,
scope: CoroutineScope?,
): DataSet<R> = ActiveDataTree<R>(outputType) {
coroutineScope {
populate(transform(dataSet, meta))
}
scope?.let {
dataSet.updates.collect {
//clear old nodes
remove(it)
//collect new items
populate(scope.transform(dataSet, meta, it))
//FIXME if the target is data, updates are fired twice
}
}
}
}


@ -5,7 +5,7 @@ import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.MetaRepr
import space.kscience.dataforge.meta.isEmpty
import space.kscience.dataforge.misc.DFInternal
import space.kscience.dataforge.misc.Type
import space.kscience.dataforge.misc.DfId
import kotlin.coroutines.CoroutineContext
import kotlin.coroutines.EmptyCoroutineContext
import kotlin.reflect.KType
@ -14,8 +14,8 @@ import kotlin.reflect.typeOf
/**
* A data element characterized by its meta
*/
@Type(Data.TYPE)
public interface Data<out T : Any> : Goal<T>, MetaRepr {
@DfId(Data.TYPE)
public interface Data<out T> : Goal<T>, MetaRepr {
/**
* Type marker for the data. The type is known before the calculation takes place so it could be checked.
*/
@ -73,7 +73,7 @@ private class LazyData<T : Any>(
override val type: KType,
override val meta: Meta = Meta.EMPTY,
additionalContext: CoroutineContext = EmptyCoroutineContext,
dependencies: Collection<Data<*>> = emptyList(),
dependencies: Collection<Goal<*>> = emptyList(),
block: suspend () -> T,
) : Data<T>, LazyGoal<T>(additionalContext, dependencies, block)
@ -83,13 +83,17 @@ public class StaticData<T : Any>(
override val meta: Meta = Meta.EMPTY,
) : Data<T>, StaticGoal<T>(value)
@Suppress("FunctionName")
public inline fun <reified T : Any> Data(value: T, meta: Meta = Meta.EMPTY): StaticData<T> =
StaticData(typeOf<T>(), value, meta)
@Suppress("FunctionName")
@DFInternal
public fun <T : Any> Data(
type: KType,
meta: Meta = Meta.EMPTY,
context: CoroutineContext = EmptyCoroutineContext,
dependencies: Collection<Data<*>> = emptyList(),
dependencies: Collection<Goal<*>> = emptyList(),
block: suspend () -> T,
): Data<T> = LazyData(type, meta, context, dependencies, block)
@ -98,6 +102,6 @@ public fun <T : Any> Data(
public inline fun <reified T : Any> Data(
meta: Meta = Meta.EMPTY,
context: CoroutineContext = EmptyCoroutineContext,
dependencies: Collection<Data<*>> = emptyList(),
dependencies: Collection<Goal<*>> = emptyList(),
noinline block: suspend () -> T,
): Data<T> = Data(typeOf<T>(), meta, context, dependencies, block)
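The factory functions above in use, a brief sketch (the computation is a stand-in):

```kotlin
import space.kscience.dataforge.data.Data

// Stand-in for real work.
suspend fun expensiveComputation(): Int = 6 * 7

val staticDatum: Data<Int> = Data(42)                        // StaticData wrapping a ready value
val lazyDatum: Data<Int> = Data { expensiveComputation() }   // LazyData, evaluated when awaited
```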


@ -1,11 +1,15 @@
package space.kscience.dataforge.data
import kotlinx.coroutines.*
import kotlinx.coroutines.flow.*
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.emptyFlow
import kotlinx.coroutines.flow.mapNotNull
import space.kscience.dataforge.data.Data.Companion.TYPE_OF_NOTHING
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.set
import space.kscience.dataforge.names.*
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.asName
import space.kscience.dataforge.names.endsWith
import space.kscience.dataforge.names.parseAsName
import kotlin.reflect.KType
public interface DataSet<out T : Any> {
@ -16,23 +20,19 @@ public interface DataSet<out T : Any> {
public val dataType: KType
/**
* Traverse this provider or its child. The order is not guaranteed.
* [root] points to a root name for traversal. If it is empty, traverse this source, if it points to a [Data],
* return flow, that contains single [Data], if it points to a node with children, return children.
* Meta-data associated with this node. If no meta is provided, returns [Meta.EMPTY].
*/
public fun flow(): Flow<NamedData<T>>
public val meta: Meta
/**
* Traverse this [DataSet] returning named data instances. The order is not guaranteed.
*/
public operator fun iterator(): Iterator<NamedData<T>>
/**
* Get data with given name.
*/
public suspend fun getData(name: Name): Data<T>?
/**
* Get a snapshot of names of top level children of given node. Empty if node does not exist or is a leaf.
*/
public suspend fun listTop(prefix: Name = Name.EMPTY): List<Name> =
flow().map { it.name }.filter { it.startsWith(prefix) && (it.length == prefix.length + 1) }.toList()
// By default traverses the whole tree. Could be optimized in descendants
public operator fun get(name: Name): Data<T>?
public companion object {
public val META_KEY: Name = "@meta".asName()
@ -42,17 +42,35 @@ public interface DataSet<out T : Any> {
*/
public val EMPTY: DataSet<Nothing> = object : DataSet<Nothing> {
override val dataType: KType = TYPE_OF_NOTHING
override val meta: Meta get() = Meta.EMPTY
private val nothing: Nothing get() = error("this is nothing")
override fun iterator(): Iterator<NamedData<Nothing>> = emptySequence<NamedData<Nothing>>().iterator()
override fun flow(): Flow<NamedData<Nothing>> = emptyFlow()
override suspend fun getData(name: Name): Data<Nothing>? = null
override fun get(name: Name): Data<Nothing>? = null
}
}
}
public interface ActiveDataSet<T : Any> : DataSet<T> {
public fun <T : Any> DataSet<T>.asSequence(): Sequence<NamedData<T>> = object : Sequence<NamedData<T>> {
override fun iterator(): Iterator<NamedData<T>> = this@asSequence.iterator()
}
/**
* Return a single [Data] in this [DataSet]. Throw error if it is not single.
*/
public fun <T : Any> DataSet<T>.single(): NamedData<T> = asSequence().single()
public fun <T : Any> DataSet<T>.asIterable(): Iterable<NamedData<T>> = object : Iterable<NamedData<T>> {
override fun iterator(): Iterator<NamedData<T>> = this@asIterable.iterator()
}
public operator fun <T : Any> DataSet<T>.get(name: String): Data<T>? = get(name.parseAsName())
/**
* A [DataSet] with propagated updates.
*/
public interface DataSource<out T : Any> : DataSet<T>, CoroutineScope {
/**
* A flow of updated item names. Updates are propagated in a form of [Flow] of names of updated nodes.
* Those can include new data items and replacement of existing ones. The replaced items could update existing data content
@ -60,30 +78,38 @@ public interface ActiveDataSet<T : Any> : DataSet<T> {
*
*/
public val updates: Flow<Name>
/**
* Stop generating updates from this [DataSource]
*/
public fun close() {
coroutineContext[Job]?.cancel()
}
}
public val <T : Any> DataSet<T>.updates: Flow<Name> get() = if (this is ActiveDataSet) updates else emptyFlow()
/**
* Flow all data nodes with names starting with [branchName]
*/
public fun <T : Any> DataSet<T>.flowChildren(branchName: Name): Flow<NamedData<T>> = this@flowChildren.flow().filter {
it.name.startsWith(branchName)
}
public val <T : Any> DataSet<T>.updates: Flow<Name> get() = if (this is DataSource) updates else emptyFlow()
//
///**
// * Flow all data nodes with names starting with [branchName]
// */
//public fun <T : Any> DataSet<T>.children(branchName: Name): Sequence<NamedData<T>> =
// this@children.asSequence().filter {
// it.name.startsWith(branchName)
// }
/**
* Start computation for all goals in data node and return a job for the whole node
*/
public fun <T : Any> DataSet<T>.startAll(coroutineScope: CoroutineScope): Job = coroutineScope.launch {
flow().map {
asIterable().map {
it.launch(this@launch)
}.toList().joinAll()
}.joinAll()
}
public suspend fun <T : Any> DataSet<T>.join(): Unit = coroutineScope { startAll(this).join() }
public suspend fun <T : Any> DataSet<T>.computeAndJoinAll(): Unit = coroutineScope { startAll(this).join() }
public suspend fun DataSet<*>.toMeta(): Meta = Meta {
flow().collect {
public fun DataSet<*>.toMeta(): Meta = Meta {
forEach {
if (it.name.endsWith(DataSet.META_KEY)) {
set(it.name, it.meta)
} else {
@ -95,4 +121,4 @@ public suspend fun DataSet<*>.toMeta(): Meta = Meta {
}
}
public val <T : Any> DataSet<T>.updatesWithData: Flow<NamedData<T>> get() = updates.mapNotNull { getData(it)?.named(it) }
public val <T : Any> DataSet<T>.updatesWithData: Flow<NamedData<T>> get() = updates.mapNotNull { get(it)?.named(it) }
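A short sketch of the pull-style access that replaces the flow-based traversal (`"some.data"` is an arbitrary name):

```kotlin
import space.kscience.dataforge.data.Data
import space.kscience.dataforge.data.DataSet
import space.kscience.dataforge.data.get

fun <T : Any> inspect(set: DataSet<T>) {
    // Synchronous traversal via the new iterator
    for (item in set) println(item.name)
    // Direct name-based access; the String overload parses the name
    val datum: Data<T>? = set["some.data"]
}
```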


@ -1,12 +1,10 @@
package space.kscience.dataforge.data
import kotlinx.coroutines.coroutineScope
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.collect
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.MutableMeta
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.isEmpty
import space.kscience.dataforge.names.plus
import kotlin.reflect.KType
@ -16,137 +14,152 @@ public interface DataSetBuilder<in T : Any> {
/**
* Remove all data items starting with [name]
*/
public suspend fun remove(name: Name)
public fun remove(name: Name)
public suspend fun emit(name: Name, data: Data<T>?)
public fun data(name: Name, data: Data<T>?)
/**
* Set a current state of given [dataSet] into a branch [name]. Does not propagate updates
*/
public suspend fun emit(name: Name, dataSet: DataSet<T>) {
public fun node(name: Name, dataSet: DataSet<T>) {
//remove previous items
if (name != Name.EMPTY) {
remove(name)
}
//Set new items
dataSet.flow().collect {
emit(name + it.name, it.data)
dataSet.forEach {
data(name + it.name, it.data)
}
}
/**
* Append data to node
* Set meta for the given node
*/
public suspend infix fun String.put(data: Data<T>): Unit = emit(Name.parse(this), data)
public fun meta(name: Name, meta: Meta)
/**
* Append node
*/
public suspend infix fun String.put(dataSet: DataSet<T>): Unit = emit(Name.parse(this), dataSet)
/**
* Build and append node
*/
public suspend infix fun String.put(block: suspend DataSetBuilder<T>.() -> Unit): Unit = emit(Name.parse(this), block)
}
private class SubSetBuilder<in T : Any>(
/**
* Define meta in this [DataSet]
*/
public fun <T : Any> DataSetBuilder<T>.meta(value: Meta): Unit = meta(Name.EMPTY, value)
/**
* Define meta in this [DataSet]
*/
public fun <T : Any> DataSetBuilder<T>.meta(mutableMeta: MutableMeta.() -> Unit): Unit = meta(Meta(mutableMeta))
@PublishedApi
internal class SubSetBuilder<in T : Any>(
private val parent: DataSetBuilder<T>,
private val branch: Name,
) : DataSetBuilder<T> {
override val dataType: KType get() = parent.dataType
override suspend fun remove(name: Name) {
override fun remove(name: Name) {
parent.remove(branch + name)
}
override suspend fun emit(name: Name, data: Data<T>?) {
parent.emit(branch + name, data)
override fun data(name: Name, data: Data<T>?) {
parent.data(branch + name, data)
}
override suspend fun emit(name: Name, dataSet: DataSet<T>) {
parent.emit(branch + name, dataSet)
override fun node(name: Name, dataSet: DataSet<T>) {
parent.node(branch + name, dataSet)
}
override fun meta(name: Name, meta: Meta) {
parent.meta(branch + name, meta)
}
}
public suspend fun <T : Any> DataSetBuilder<T>.emit(name: Name, block: suspend DataSetBuilder<T>.() -> Unit) {
SubSetBuilder(this, name).apply { block() }
public inline fun <T : Any> DataSetBuilder<T>.node(
name: Name,
crossinline block: DataSetBuilder<T>.() -> Unit,
) {
if (name.isEmpty()) block() else SubSetBuilder(this, name).block()
}
public suspend fun <T : Any> DataSetBuilder<T>.emit(name: String, data: Data<T>) {
emit(Name.parse(name), data)
public fun <T : Any> DataSetBuilder<T>.data(name: String, value: Data<T>) {
data(Name.parse(name), value)
}
public suspend fun <T : Any> DataSetBuilder<T>.emit(name: String, set: DataSet<T>) {
this.emit(Name.parse(name), set)
public fun <T : Any> DataSetBuilder<T>.node(name: String, set: DataSet<T>) {
node(Name.parse(name), set)
}
public suspend fun <T : Any> DataSetBuilder<T>.emit(name: String, block: suspend DataSetBuilder<T>.() -> Unit): Unit =
this@emit.emit(Name.parse(name), block)
public inline fun <T : Any> DataSetBuilder<T>.node(
name: String,
crossinline block: DataSetBuilder<T>.() -> Unit,
): Unit = node(Name.parse(name), block)
public suspend fun <T : Any> DataSetBuilder<T>.emit(data: NamedData<T>) {
emit(data.name, data.data)
public fun <T : Any> DataSetBuilder<T>.set(value: NamedData<T>) {
data(value.name, value.data)
}
/**
* Produce lazy [Data] and emit it into the [DataSetBuilder]
*/
public suspend inline fun <reified T : Any> DataSetBuilder<T>.produce(
public inline fun <reified T : Any> DataSetBuilder<T>.produce(
name: String,
meta: Meta = Meta.EMPTY,
noinline producer: suspend () -> T,
) {
val data = Data(meta, block = producer)
emit(name, data)
data(name, data)
}
public suspend inline fun <reified T : Any> DataSetBuilder<T>.produce(
public inline fun <reified T : Any> DataSetBuilder<T>.produce(
name: Name,
meta: Meta = Meta.EMPTY,
noinline producer: suspend () -> T,
) {
val data = Data(meta, block = producer)
emit(name, data)
data(name, data)
}
/**
* Emit a static data with the fixed value
*/
public suspend inline fun <reified T : Any> DataSetBuilder<T>.static(
public inline fun <reified T : Any> DataSetBuilder<T>.static(
name: String,
data: T,
meta: Meta = Meta.EMPTY
): Unit =
emit(name, Data.static(data, meta))
meta: Meta = Meta.EMPTY,
): Unit = data(name, Data.static(data, meta))
public suspend inline fun <reified T : Any> DataSetBuilder<T>.static(
public inline fun <reified T : Any> DataSetBuilder<T>.static(
name: Name,
data: T,
meta: Meta = Meta.EMPTY
): Unit =
emit(name, Data.static(data, meta))
meta: Meta = Meta.EMPTY,
): Unit = data(name, Data.static(data, meta))
public suspend inline fun <reified T : Any> DataSetBuilder<T>.static(
public inline fun <reified T : Any> DataSetBuilder<T>.static(
name: String,
data: T,
mutableMeta: MutableMeta.() -> Unit,
): Unit = emit(Name.parse(name), Data.static(data, Meta(mutableMeta)))
): Unit = data(Name.parse(name), Data.static(data, Meta(mutableMeta)))
/**
* Update data with given node data and meta with node meta.
*/
@DFExperimental
public suspend fun <T : Any> DataSetBuilder<T>.populate(tree: DataSet<T>): Unit = coroutineScope {
tree.flow().collect {
public fun <T : Any> DataSetBuilder<T>.populateFrom(tree: DataSet<T>): Unit {
tree.forEach {
//TODO check if the place is occupied
emit(it.name, it.data)
data(it.name, it.data)
}
}
public suspend fun <T : Any> DataSetBuilder<T>.populate(flow: Flow<NamedData<T>>) {
flow.collect {
emit(it.name, it.data)
//public fun <T : Any> DataSetBuilder<T>.populateFrom(flow: Flow<NamedData<T>>) {
// flow.collect {
// data(it.name, it.data)
// }
//}
public fun <T : Any> DataSetBuilder<T>.populateFrom(sequence: Sequence<NamedData<T>>) {
sequence.forEach {
data(it.name, it.data)
}
}


@ -1,18 +1,25 @@
package space.kscience.dataforge.data
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.emitAll
import kotlinx.coroutines.flow.flow
import kotlinx.coroutines.flow.map
import space.kscience.dataforge.misc.Type
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.misc.DFInternal
import space.kscience.dataforge.misc.DfId
import space.kscience.dataforge.names.*
import kotlin.collections.component1
import kotlin.collections.component2
import kotlin.reflect.KType
import kotlin.reflect.typeOf
public sealed class DataTreeItem<out T : Any> {
public class Node<out T : Any>(public val tree: DataTree<T>) : DataTreeItem<T>()
public class Leaf<out T : Any>(public val data: Data<T>) : DataTreeItem<T>()
public abstract val meta: Meta
public class Node<out T : Any>(public val tree: DataTree<T>) : DataTreeItem<T>() {
override val meta: Meta get() = tree.meta
}
public class Leaf<out T : Any>(public val data: Data<T>) : DataTreeItem<T>() {
override val meta: Meta get() = data.meta
}
}
public val <T : Any> DataTreeItem<T>.type: KType
@ -24,63 +31,79 @@ public val <T : Any> DataTreeItem<T>.type: KType
/**
* A tree-like [DataSet] grouped into the node. All data inside the node must inherit its type
*/
@Type(DataTree.TYPE)
@DfId(DataTree.TYPE)
public interface DataTree<out T : Any> : DataSet<T> {
/**
* Children items of this [DataTree] provided asynchronously
* Top-level children items of this [DataTree]
*/
public suspend fun items(): Map<NameToken, DataTreeItem<T>>
public val items: Map<NameToken, DataTreeItem<T>>
override fun flow(): Flow<NamedData<T>> = flow {
items().forEach { (token, childItem: DataTreeItem<T>) ->
if(!token.body.startsWith("@")) {
override val meta: Meta get() = items[META_ITEM_NAME_TOKEN]?.meta ?: Meta.EMPTY
override fun iterator(): Iterator<NamedData<T>> = iterator {
items.forEach { (token, childItem: DataTreeItem<T>) ->
if (!token.body.startsWith("@")) {
when (childItem) {
is DataTreeItem.Leaf -> emit(childItem.data.named(token.asName()))
is DataTreeItem.Node -> emitAll(childItem.tree.flow().map { it.named(token + it.name) })
is DataTreeItem.Leaf -> yield(childItem.data.named(token.asName()))
is DataTreeItem.Node -> yieldAll(childItem.tree.asSequence().map { it.named(token + it.name) })
}
}
}
}
override suspend fun listTop(prefix: Name): List<Name> =
getItem(prefix).tree?.items()?.keys?.map { prefix + it } ?: emptyList()
override suspend fun getData(name: Name): Data<T>? = when (name.length) {
override fun get(name: Name): Data<T>? = when (name.length) {
0 -> null
1 -> items()[name.firstOrNull()!!].data
else -> items()[name.firstOrNull()!!].tree?.getData(name.cutFirst())
1 -> items[name.firstOrNull()!!].data
else -> items[name.firstOrNull()!!].tree?.get(name.cutFirst())
}
public companion object {
public const val TYPE: String = "dataTree"
/**
* A name token used to designate tree node meta
*/
public val META_ITEM_NAME_TOKEN: NameToken = NameToken("@meta")
@DFInternal
public fun <T : Any> emptyWithType(type: KType, meta: Meta = Meta.EMPTY): DataTree<T> = object : DataTree<T> {
override val items: Map<NameToken, DataTreeItem<T>> get() = emptyMap()
override val dataType: KType get() = type
override val meta: Meta get() = meta
}
@OptIn(DFInternal::class)
public inline fun <reified T : Any> empty(meta: Meta = Meta.EMPTY): DataTree<T> =
emptyWithType<T>(typeOf<T>(), meta)
}
}
public suspend fun <T: Any> DataSet<T>.getData(name: String): Data<T>? = getData(Name.parse(name))
public fun <T : Any> DataTree<T>.listChildren(prefix: Name): List<Name> =
getItem(prefix).tree?.items?.keys?.map { prefix + it } ?: emptyList()
/**
* Get a [DataTreeItem] with given [name] or null if the item does not exist
*/
public tailrec suspend fun <T : Any> DataTree<T>.getItem(name: Name): DataTreeItem<T>? = when (name.length) {
public tailrec fun <T : Any> DataTree<T>.getItem(name: Name): DataTreeItem<T>? = when (name.length) {
0 -> DataTreeItem.Node(this)
1 -> items()[name.firstOrNull()]
else -> items()[name.firstOrNull()!!].tree?.getItem(name.cutFirst())
1 -> items[name.firstOrNull()]
else -> items[name.firstOrNull()!!].tree?.getItem(name.cutFirst())
}
public val <T : Any> DataTreeItem<T>?.tree: DataTree<T>? get() = (this as? DataTreeItem.Node<T>)?.tree
public val <T : Any> DataTreeItem<T>?.data: Data<T>? get() = (this as? DataTreeItem.Leaf<T>)?.data
/**
* Flow of all children including nodes
* A [Sequence] of all children including nodes
*/
public fun <T : Any> DataTree<T>.itemFlow(): Flow<Pair<Name, DataTreeItem<T>>> = flow {
items().forEach { (head, item) ->
emit(head.asName() to item)
public fun <T : Any> DataTree<T>.traverseItems(): Sequence<Pair<Name, DataTreeItem<T>>> = sequence {
items.forEach { (head, item) ->
yield(head.asName() to item)
if (item is DataTreeItem.Node) {
val subSequence = item.tree.itemFlow()
val subSequence = item.tree.traverseItems()
.map { (name, data) -> (head.asName() + name) to data }
emitAll(subSequence)
yieldAll(subSequence)
}
}
}
@ -89,8 +112,8 @@ public fun <T : Any> DataTree<T>.itemFlow(): Flow<Pair<Name, DataTreeItem<T>>> =
* Get a branch of this [DataTree] with a given [branchName].
* The difference from similar method for [DataSet] is that internal logic is more simple and the return value is a [DataTree]
*/
public fun <T : Any> DataTree<T>.branch(branchName: Name): DataTree<T> = object : DataTree<T> {
override val dataType: KType get() = this@branch.dataType
@OptIn(DFInternal::class)
public fun <T : Any> DataTree<T>.branch(branchName: Name): DataTree<T> =
getItem(branchName)?.tree ?: DataTree.emptyWithType(dataType)
override suspend fun items(): Map<NameToken, DataTreeItem<T>> = getItem(branchName).tree?.items() ?: emptyMap()
}
public fun <T : Any> DataTree<T>.branch(branchName: String): DataTree<T> = branch(branchName.parseAsName())
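A sketch of navigating the reworked, fully synchronous tree API (the "calibration" branch name is a placeholder):

```kotlin
import space.kscience.dataforge.data.DataTree
import space.kscience.dataforge.data.branch
import space.kscience.dataforge.data.listChildren
import space.kscience.dataforge.data.traverseItems
import space.kscience.dataforge.names.Name

fun <T : Any> describe(tree: DataTree<T>) {
    val topLevel: List<Name> = tree.listChildren(Name.EMPTY)      // names of direct children
    val calibration: DataTree<T> = tree.branch("calibration")     // empty tree if the branch is absent
    tree.traverseItems().forEach { (name, item) ->                // walk of all items, nodes included
        println("$name -> ${item.meta}")
    }
}
```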


@ -0,0 +1,127 @@
package space.kscience.dataforge.data
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Job
import kotlinx.coroutines.flow.MutableSharedFlow
import kotlinx.coroutines.launch
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.misc.DFInternal
import space.kscience.dataforge.misc.ThreadSafe
import space.kscience.dataforge.names.*
import kotlin.collections.set
import kotlin.coroutines.CoroutineContext
import kotlin.coroutines.coroutineContext
import kotlin.reflect.KType
import kotlin.reflect.typeOf
public interface DataSourceBuilder<T : Any> : DataSetBuilder<T>, DataSource<T> {
override val updates: MutableSharedFlow<Name>
}
/**
* A mutable [DataTree] that propagates updates
*/
public class DataTreeBuilder<T : Any>(
override val dataType: KType,
coroutineContext: CoroutineContext,
) : DataTree<T>, DataSourceBuilder<T> {
override val coroutineContext: CoroutineContext =
coroutineContext + Job(coroutineContext[Job]) + GoalExecutionRestriction()
private val treeItems = HashMap<NameToken, DataTreeItem<T>>()
override val items: Map<NameToken, DataTreeItem<T>>
get() = treeItems.filter { !it.key.body.startsWith("@") }
override val updates: MutableSharedFlow<Name> = MutableSharedFlow<Name>()
@ThreadSafe
private fun remove(token: NameToken) {
if (treeItems.remove(token) != null) {
launch {
updates.emit(token.asName())
}
}
}
override fun remove(name: Name) {
if (name.isEmpty()) error("Can't remove the root node")
(getItem(name.cutLast()).tree as? DataTreeBuilder)?.remove(name.lastOrNull()!!)
}
@ThreadSafe
private fun set(token: NameToken, data: Data<T>) {
treeItems[token] = DataTreeItem.Leaf(data)
}
@ThreadSafe
private fun set(token: NameToken, node: DataTree<T>) {
treeItems[token] = DataTreeItem.Node(node)
}
private fun getOrCreateNode(token: NameToken): DataTreeBuilder<T> =
(treeItems[token] as? DataTreeItem.Node<T>)?.tree as? DataTreeBuilder<T>
?: DataTreeBuilder<T>(dataType, coroutineContext).also { set(token, it) }
private fun getOrCreateNode(name: Name): DataTreeBuilder<T> = when (name.length) {
0 -> this
1 -> getOrCreateNode(name.firstOrNull()!!)
else -> getOrCreateNode(name.firstOrNull()!!).getOrCreateNode(name.cutFirst())
}
override fun data(name: Name, data: Data<T>?) {
if (data == null) {
remove(name)
} else {
when (name.length) {
0 -> error("Can't add data with empty name")
1 -> set(name.firstOrNull()!!, data)
2 -> getOrCreateNode(name.cutLast()).set(name.lastOrNull()!!, data)
}
}
launch {
updates.emit(name)
}
}
override fun meta(name: Name, meta: Meta) {
val item = getItem(name)
if (item is DataTreeItem.Leaf) error("TODO: Can't change meta of existing leaf item.")
data(name + DataTree.META_ITEM_NAME_TOKEN, Data.empty(meta))
}
}
/**
* Create a dynamic [DataSource]. Initial data is placed synchronously.
*/
@DFInternal
@Suppress("FunctionName")
public fun <T : Any> DataSource(
type: KType,
parent: CoroutineScope,
block: DataSourceBuilder<T>.() -> Unit,
): DataTreeBuilder<T> = DataTreeBuilder<T>(type, parent.coroutineContext).apply(block)
@Suppress("OPT_IN_USAGE", "FunctionName")
public inline fun <reified T : Any> DataSource(
parent: CoroutineScope,
crossinline block: DataSourceBuilder<T>.() -> Unit,
): DataTreeBuilder<T> = DataSource(typeOf<T>(), parent) { block() }
@Suppress("FunctionName")
public suspend inline fun <reified T : Any> DataSource(
crossinline block: DataSourceBuilder<T>.() -> Unit = {},
): DataTreeBuilder<T> = DataTreeBuilder<T>(typeOf<T>(), coroutineContext).apply { block() }
public inline fun <reified T : Any> DataSourceBuilder<T>.emit(
name: Name,
parent: CoroutineScope,
noinline block: DataSourceBuilder<T>.() -> Unit,
): Unit = node(name, DataSource(parent, block))
public inline fun <reified T : Any> DataSourceBuilder<T>.emit(
name: String,
parent: CoroutineScope,
noinline block: DataSourceBuilder<T>.() -> Unit,
): Unit = node(Name.parse(name), DataSource(parent, block))
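A sketch of assembling a dynamic source with the renamed builder methods (`static`, `produce`, `node`, `meta`); names and values are placeholders:

```kotlin
import kotlinx.coroutines.CoroutineScope
import space.kscience.dataforge.data.*
import space.kscience.dataforge.meta.*

fun CoroutineScope.demoSource(): DataTreeBuilder<Int> = DataSource<Int>(this) {
    static("a", 1)                       // ready-made value
    node("branch") {
        static("b", 2)
        produce("lazy") { 3 }            // computed on demand
    }
    meta { "description" put "demo data" }
}
```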


@ -67,7 +67,7 @@ public open class LazyGoal<T>(
* If [GoalExecutionRestriction] is present in the [coroutineScope] context, the call could produce a error a warning
* depending on the settings.
*/
@DFExperimental
@OptIn(DFExperimental::class)
override fun async(coroutineScope: CoroutineScope): Deferred<T> {
val log = coroutineScope.coroutineContext[GoalLogger]
// Check if context restricts goal computation

View File

@ -3,11 +3,25 @@ package space.kscience.dataforge.data
import kotlin.coroutines.CoroutineContext
public enum class GoalExecutionRestrictionPolicy {
/**
* Allow eager execution
*/
NONE,
/**
* Give warning on eager execution
*/
WARNING,
/**
* Throw error on eager execution
*/
ERROR
}
/**
* A special coroutine context key that allows or disallows goal execution during configuration time (eager execution).
*/
public class GoalExecutionRestriction(
public val policy: GoalExecutionRestrictionPolicy = GoalExecutionRestrictionPolicy.ERROR,
) : CoroutineContext.Element {
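A sketch of attaching the restriction to a configuration-time scope (it assumes, as with `GoalLogger` below, that the companion object acts as the `CoroutineContext.Key`):
```kotlin
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import space.kscience.dataforge.data.GoalExecutionRestriction
import space.kscience.dataforge.data.GoalExecutionRestrictionPolicy

// Any LazyGoal started in this scope hits the restriction check shown above
// and fails instead of silently starting eager computation.
val configurationScope = CoroutineScope(
    Dispatchers.Default + GoalExecutionRestriction(GoalExecutionRestrictionPolicy.ERROR)
)
```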

View File

@ -2,6 +2,9 @@ package space.kscience.dataforge.data
import kotlin.coroutines.CoroutineContext
/**
* Coroutine context element that provides logging capabilities
*/
public interface GoalLogger : CoroutineContext.Element {
override val key: CoroutineContext.Key<*> get() = GoalLogger

View File

@ -15,14 +15,13 @@
*/
package space.kscience.dataforge.data
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.flow.collect
import kotlinx.coroutines.launch
import space.kscience.dataforge.meta.get
import space.kscience.dataforge.meta.string
import space.kscience.dataforge.misc.DFInternal
public interface GroupRule {
public suspend fun <T : Any> gather(set: DataSet<T>): Map<String, DataSet<T>>
public fun <T : Any> gather(set: DataSet<T>): Map<String, DataSet<T>>
public companion object {
/**
@ -33,32 +32,45 @@ public interface GroupRule {
* @param defaultTagValue
* @return
*/
@OptIn(DFInternal::class)
public fun byMetaValue(
scope: CoroutineScope,
key: String,
defaultTagValue: String,
): GroupRule = object : GroupRule {
override suspend fun <T : Any> gather(
override fun <T : Any> gather(
set: DataSet<T>,
): Map<String, DataSet<T>> {
val map = HashMap<String, ActiveDataTree<T>>()
val map = HashMap<String, DataSet<T>>()
set.flow().collect { data ->
val tagValue = data.meta[key]?.string ?: defaultTagValue
map.getOrPut(tagValue) { ActiveDataTree(set.dataType) }.emit(data.name, data.data)
}
if (set is DataSource) {
set.forEach { data ->
val tagValue: String = data.meta[key]?.string ?: defaultTagValue
(map.getOrPut(tagValue) { DataTreeBuilder(set.dataType, set.coroutineContext) } as DataTreeBuilder<T>)
.data(data.name, data.data)
scope.launch {
set.updates.collect { name ->
val data = set.getData(name)
set.launch {
set.updates.collect { name ->
val dataUpdate = set[name]
@Suppress("NULLABLE_EXTENSION_OPERATOR_WITH_SAFE_CALL_RECEIVER")
val tagValue = data?.meta?.get(key)?.string ?: defaultTagValue
map.getOrPut(tagValue) { ActiveDataTree(set.dataType) }.emit(name, data)
val updateTagValue = dataUpdate?.meta?.get(key)?.string ?: defaultTagValue
map.getOrPut(updateTagValue) {
DataSource(set.dataType, this) {
data(name, dataUpdate)
}
}
}
}
}
} else {
set.forEach { data ->
val tagValue: String = data.meta[key]?.string ?: defaultTagValue
(map.getOrPut(tagValue) { StaticDataTree(set.dataType) } as StaticDataTree<T>)
.data(data.name, data.data)
}
}
return map
}
}
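A usage sketch; the hunk leaves it ambiguous whether `byMetaValue` keeps its `CoroutineScope` parameter after the refactor, so this follows the unchanged signature lines above:
```kotlin
import kotlinx.coroutines.CoroutineScope
import space.kscience.dataforge.data.DataSet
import space.kscience.dataforge.data.GroupRule

// Split a set into groups keyed by the "type" meta value; items without the key
// land in the "unknown" group. For a live DataSource the groups keep updating.
fun <T : Any> groupByType(scope: CoroutineScope, data: DataSet<T>): Map<String, DataSet<T>> =
    GroupRule.byMetaValue(scope, "type", "unknown").gather(data)
```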

View File

@ -9,6 +9,9 @@ public interface NamedData<out T : Any> : Named, Data<T> {
public val data: Data<T>
}
public operator fun NamedData<*>.component1(): Name = name
public operator fun <T: Any> NamedData<T>.component2(): Data<T> = data
private class NamedDataImpl<out T : Any>(
override val name: Name,
override val data: Data<T>,

View File

@ -1,7 +1,6 @@
package space.kscience.dataforge.data
import kotlinx.coroutines.coroutineScope
import kotlinx.coroutines.flow.collect
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.*
import kotlin.reflect.KType
@ -12,15 +11,16 @@ internal class StaticDataTree<T : Any>(
override val dataType: KType,
) : DataSetBuilder<T>, DataTree<T> {
private val items: MutableMap<NameToken, DataTreeItem<T>> = HashMap()
private val _items: MutableMap<NameToken, DataTreeItem<T>> = HashMap()
override suspend fun items(): Map<NameToken, DataTreeItem<T>> = items.filter { !it.key.body.startsWith("@") }
override val items: Map<NameToken, DataTreeItem<T>>
get() = _items.filter { !it.key.body.startsWith("@") }
override suspend fun remove(name: Name) {
override fun remove(name: Name) {
when (name.length) {
0 -> error("Can't remove root tree node")
1 -> items.remove(name.firstOrNull()!!)
else -> (items[name.firstOrNull()!!].tree as? StaticDataTree<T>)?.remove(name.cutFirst())
1 -> _items.remove(name.firstOrNull()!!)
else -> (_items[name.firstOrNull()!!].tree as? StaticDataTree<T>)?.remove(name.cutFirst())
}
}
@ -28,51 +28,55 @@ internal class StaticDataTree<T : Any>(
0 -> this
1 -> {
val itemName = name.firstOrNull()!!
(items[itemName].tree as? StaticDataTree<T>) ?: StaticDataTree<T>(dataType).also {
items[itemName] = DataTreeItem.Node(it)
(_items[itemName].tree as? StaticDataTree<T>) ?: StaticDataTree<T>(dataType).also {
_items[itemName] = DataTreeItem.Node(it)
}
}
else -> getOrCreateNode(name.cutLast()).getOrCreateNode(name.lastOrNull()!!.asName())
}
private suspend fun set(name: Name, item: DataTreeItem<T>?) {
private fun set(name: Name, item: DataTreeItem<T>?) {
if (name.isEmpty()) error("Can't set top level tree node")
if (item == null) {
remove(name)
} else {
getOrCreateNode(name.cutLast()).items[name.lastOrNull()!!] = item
getOrCreateNode(name.cutLast())._items[name.lastOrNull()!!] = item
}
}
override suspend fun emit(name: Name, data: Data<T>?) {
override fun data(name: Name, data: Data<T>?) {
set(name, data?.let { DataTreeItem.Leaf(it) })
}
override suspend fun emit(name: Name, dataSet: DataSet<T>) {
override fun node(name: Name, dataSet: DataSet<T>) {
if (dataSet is StaticDataTree) {
set(name, DataTreeItem.Node(dataSet))
} else {
coroutineScope {
dataSet.flow().collect {
emit(name + it.name, it.data)
}
dataSet.forEach {
data(name + it.name, it.data)
}
}
}
override fun meta(name: Name, meta: Meta) {
val item = getItem(name)
if (item is DataTreeItem.Leaf) TODO("Can't change meta of existing leaf item.")
data(name + DataTree.META_ITEM_NAME_TOKEN, Data.empty(meta))
}
}
@Suppress("FunctionName")
public suspend fun <T : Any> DataTree(
public inline fun <T : Any> DataTree(
dataType: KType,
block: suspend DataSetBuilder<T>.() -> Unit,
block: DataSetBuilder<T>.() -> Unit,
): DataTree<T> = StaticDataTree<T>(dataType).apply { block() }
@Suppress("FunctionName")
public suspend inline fun <reified T : Any> DataTree(
noinline block: suspend DataSetBuilder<T>.() -> Unit,
public inline fun <reified T : Any> DataTree(
noinline block: DataSetBuilder<T>.() -> Unit,
): DataTree<T> = DataTree(typeOf<T>(), block)
@OptIn(DFExperimental::class)
public suspend fun <T : Any> DataSet<T>.seal(): DataTree<T> = DataTree(dataType){
populate(this@seal)
public fun <T : Any> DataSet<T>.seal(): DataTree<T> = DataTree(dataType) {
populateFrom(this@seal)
}

View File

@ -4,11 +4,11 @@ import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.filter
import kotlinx.coroutines.flow.map
import kotlinx.coroutines.flow.mapNotNull
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.isEmpty
import space.kscience.dataforge.names.plus
import space.kscience.dataforge.names.removeHeadOrNull
import space.kscience.dataforge.names.*
import kotlin.coroutines.CoroutineContext
import kotlin.coroutines.EmptyCoroutineContext
import kotlin.reflect.KType
@ -16,34 +16,58 @@ import kotlin.reflect.KType
* A stateless filtered [DataSet]
*/
public fun <T : Any> DataSet<T>.filter(
predicate: suspend (Name, Data<T>) -> Boolean,
): ActiveDataSet<T> = object : ActiveDataSet<T> {
predicate: (Name, Meta) -> Boolean,
): DataSource<T> = object : DataSource<T> {
override val dataType: KType get() = this@filter.dataType
override fun flow(): Flow<NamedData<T>> =
this@filter.flow().filter { predicate(it.name, it.data) }
override val coroutineContext: CoroutineContext
get() = (this@filter as? DataSource)?.coroutineContext ?: EmptyCoroutineContext
override suspend fun getData(name: Name): Data<T>? = this@filter.getData(name)?.takeIf {
predicate(name, it)
override val meta: Meta get() = this@filter.meta
override fun iterator(): Iterator<NamedData<T>> = iterator {
for (d in this@filter) {
if (predicate(d.name, d.meta)) {
yield(d)
}
}
}
override fun get(name: Name): Data<T>? = this@filter.get(name)?.takeIf {
predicate(name, it.meta)
}
override val updates: Flow<Name> = this@filter.updates.filter flowFilter@{ name ->
val theData = this@filter.getData(name) ?: return@flowFilter false
predicate(name, theData)
val theData = this@filter[name] ?: return@flowFilter false
predicate(name, theData.meta)
}
}
/**
* Generate a wrapper data set with a given name prefix appended to all names
*/
public fun <T : Any> DataSet<T>.withNamePrefix(prefix: Name): DataSet<T> = if (prefix.isEmpty()) this
else object : ActiveDataSet<T> {
public fun <T : Any> DataSet<T>.withNamePrefix(prefix: Name): DataSet<T> = if (prefix.isEmpty()) {
this
} else object : DataSource<T> {
override val dataType: KType get() = this@withNamePrefix.dataType
override fun flow(): Flow<NamedData<T>> = this@withNamePrefix.flow().map { it.data.named(prefix + it.name) }
override val coroutineContext: CoroutineContext
get() = (this@withNamePrefix as? DataSource)?.coroutineContext ?: EmptyCoroutineContext
override suspend fun getData(name: Name): Data<T>? =
name.removeHeadOrNull(name)?.let { this@withNamePrefix.getData(it) }
override val meta: Meta get() = this@withNamePrefix.meta
override fun iterator(): Iterator<NamedData<T>> = iterator {
for (d in this@withNamePrefix) {
yield(d.data.named(prefix + d.name))
}
}
override fun get(name: Name): Data<T>? =
name.removeFirstOrNull(name)?.let { this@withNamePrefix.get(it) }
override val updates: Flow<Name> get() = this@withNamePrefix.updates.map { prefix + it }
}
@ -53,22 +77,29 @@ else object : ActiveDataSet<T> {
*/
public fun <T : Any> DataSet<T>.branch(branchName: Name): DataSet<T> = if (branchName.isEmpty()) {
this
} else object : ActiveDataSet<T> {
} else object : DataSource<T> {
override val dataType: KType get() = this@branch.dataType
override fun flow(): Flow<NamedData<T>> = this@branch.flow().mapNotNull {
it.name.removeHeadOrNull(branchName)?.let { name ->
it.data.named(name)
override val coroutineContext: CoroutineContext
get() = (this@branch as? DataSource)?.coroutineContext ?: EmptyCoroutineContext
override val meta: Meta get() = this@branch.meta
override fun iterator(): Iterator<NamedData<T>> = iterator {
for (d in this@branch) {
d.name.removeFirstOrNull(branchName)?.let { name ->
yield(d.data.named(name))
}
}
}
override suspend fun getData(name: Name): Data<T>? = this@branch.getData(branchName + name)
override fun get(name: Name): Data<T>? = this@branch.get(branchName + name)
override val updates: Flow<Name> get() = this@branch.updates.mapNotNull { it.removeHeadOrNull(branchName) }
override val updates: Flow<Name> get() = this@branch.updates.mapNotNull { it.removeFirstOrNull(branchName) }
}
public fun <T : Any> DataSet<T>.branch(branchName: String): DataSet<T> = this@branch.branch(Name.parse(branchName))
public fun <T : Any> DataSet<T>.branch(branchName: String): DataSet<T> = this@branch.branch(branchName.parseAsName())
@DFExperimental
public suspend fun <T : Any> DataSet<T>.rootData(): Data<T>? = getData(Name.EMPTY)
public suspend fun <T : Any> DataSet<T>.rootData(): Data<T>? = get(Name.EMPTY)
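A sketch of composing the stateless wrappers defined above (the branch name, the meta key and the `boolean` accessor are illustrative assumptions):
```kotlin
import space.kscience.dataforge.data.*
import space.kscience.dataforge.meta.boolean
import space.kscience.dataforge.meta.get
import space.kscience.dataforge.names.asName

fun selectCalibrated(all: DataSet<Int>): DataSet<Int> = all
    .branch("raw")                                            // descend into the "raw" subtree
    .filter { _, meta -> meta["calibrated"].boolean == true } // keep items flagged in meta
    .withNamePrefix("calibrated".asName())                    // re-prefix the surviving names
```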

View File

@ -1,20 +0,0 @@
package space.kscience.dataforge.data
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.MutableMeta
/**
* Get a metadata node for this set if it is present
*/
public suspend fun DataSet<*>.getMeta(): Meta? = getData(DataSet.META_KEY)?.meta
/**
* Add meta-data node to a [DataSet]
*/
public suspend fun DataSetBuilder<*>.meta(meta: Meta): Unit = emit(DataSet.META_KEY, Data.empty(meta))
/**
* Add meta-data node to a [DataSet]
*/
public suspend fun DataSetBuilder<*>.meta(mutableMeta: MutableMeta.() -> Unit): Unit = meta(Meta(mutableMeta))

View File

@ -1,18 +1,26 @@
package space.kscience.dataforge.data
import kotlinx.coroutines.flow.*
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.MutableMeta
import space.kscience.dataforge.meta.seal
import space.kscience.dataforge.meta.toMutableMeta
import space.kscience.dataforge.misc.DFInternal
import kotlin.contracts.InvocationKind
import kotlin.contracts.contract
import space.kscience.dataforge.names.Name
import kotlin.coroutines.CoroutineContext
import kotlin.coroutines.EmptyCoroutineContext
import kotlin.reflect.KType
import kotlin.reflect.typeOf
public data class ValueWithMeta<T>(val meta: Meta, val value: T)
public suspend fun <T : Any> Data<T>.awaitWithMeta(): ValueWithMeta<T> = ValueWithMeta(meta, await())
public data class NamedValueWithMeta<T>(val name: Name, val meta: Meta, val value: T)
public suspend fun <T : Any> NamedData<T>.awaitWithMeta(): NamedValueWithMeta<T> =
NamedValueWithMeta(name, meta, await())
/**
* Lazily transform this data to another data. By convention [block] should not use external data (be pure).
* @param coroutineContext additional [CoroutineContext] elements used for data computation.
@ -28,7 +36,7 @@ public inline fun <T : Any, reified R : Any> Data<T>.map(
}
/**
* Combine this data with the other data using [block]. See [map] for other details
* Combine this data with the other data using [block]. See [Data::map] for other details
*/
public inline fun <T1 : Any, T2 : Any, reified R : Any> Data<T1>.combine(
other: Data<T2>,
@ -48,13 +56,13 @@ public inline fun <T1 : Any, T2 : Any, reified R : Any> Data<T1>.combine(
public inline fun <T : Any, reified R : Any> Collection<Data<T>>.reduceToData(
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
crossinline block: suspend (Collection<T>) -> R,
crossinline block: suspend (List<ValueWithMeta<T>>) -> R,
): Data<R> = Data(
meta,
coroutineContext,
this
) {
block(map { it.await() })
block(map { it.awaitWithMeta() })
}
@DFInternal
@ -62,17 +70,16 @@ public fun <K, T : Any, R : Any> Map<K, Data<T>>.reduceToData(
outputType: KType,
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
block: suspend (Map<K, T>) -> R,
block: suspend (Map<K, ValueWithMeta<T>>) -> R,
): Data<R> = Data(
outputType,
meta,
coroutineContext,
this.values
) {
block(mapValues { it.value.await() })
block(mapValues { it.value.awaitWithMeta() })
}
/**
* Lazily reduce a [Map] of [Data] with any static key.
* @param K type of the map key
@ -82,56 +89,91 @@ public fun <K, T : Any, R : Any> Map<K, Data<T>>.reduceToData(
public inline fun <K, T : Any, reified R : Any> Map<K, Data<T>>.reduceToData(
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
noinline block: suspend (Map<K, T>) -> R,
crossinline block: suspend (Map<K, ValueWithMeta<T>>) -> R,
): Data<R> = Data(
meta,
coroutineContext,
this.values
) {
block(mapValues { it.value.await() })
block(mapValues { it.value.awaitWithMeta() })
}
//flow operations
//Iterable operations
/**
* Transform an [Iterable] of [Data] to a single [Data].
*/
@DFInternal
public suspend fun <T : Any, R : Any> Flow<NamedData<T>>.reduceToData(
public inline fun <T : Any, R : Any> Iterable<Data<T>>.reduceToData(
outputType: KType,
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
transformation: suspend (Flow<NamedData<T>>) -> R,
crossinline transformation: suspend (Collection<ValueWithMeta<T>>) -> R,
): Data<R> = Data(
outputType,
meta,
coroutineContext,
toList()
) {
transformation(this)
transformation(map { it.awaitWithMeta() })
}
@OptIn(DFInternal::class)
public suspend inline fun <T : Any, reified R : Any> Flow<NamedData<T>>.reduceToData(
public inline fun <T : Any, reified R : Any> Iterable<Data<T>>.reduceToData(
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
noinline transformation: suspend (Flow<NamedData<T>>) -> R,
crossinline transformation: suspend (Collection<ValueWithMeta<T>>) -> R,
): Data<R> = reduceToData(typeOf<R>(), coroutineContext, meta) {
transformation(it)
}
/**
* Fold an iterable of data into a single [Data]
*/
public suspend inline fun <T : Any, reified R : Any> Flow<NamedData<T>>.foldToData(
public inline fun <T : Any, reified R : Any> Iterable<Data<T>>.foldToData(
initial: R,
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
noinline block: suspend (result: R, data: NamedData<T>) -> R,
crossinline block: suspend (result: R, data: ValueWithMeta<T>) -> R,
): Data<R> = reduceToData(
coroutineContext, meta
) {
it.fold(initial, block)
it.fold(initial) { acc, t -> block(acc, t) }
}
/**
* Transform an [Iterable] of [NamedData] to a single [Data].
*/
@DFInternal
public inline fun <T : Any, R : Any> Iterable<NamedData<T>>.reduceNamedToData(
outputType: KType,
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
crossinline transformation: suspend (Collection<NamedValueWithMeta<T>>) -> R,
): Data<R> = Data(
outputType,
meta,
coroutineContext,
toList()
) {
transformation(map { it.awaitWithMeta() })
}
@OptIn(DFInternal::class)
public inline fun <T : Any, reified R : Any> Iterable<NamedData<T>>.reduceNamedToData(
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
crossinline transformation: suspend (Collection<NamedValueWithMeta<T>>) -> R,
): Data<R> = reduceNamedToData(typeOf<R>(), coroutineContext, meta) {
transformation(it)
}
/**
* Fold an [Iterable] of named data into a single [Data]
*/
public inline fun <T : Any, reified R : Any> Iterable<NamedData<T>>.foldNamedToData(
initial: R,
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
crossinline block: suspend (result: R, data: NamedValueWithMeta<T>) -> R,
): Data<R> = reduceNamedToData(
coroutineContext, meta
) {
it.fold(initial) { acc, t -> block(acc, t) }
}
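Two short sketches of the iterable-based reducers above; the blocks now receive `ValueWithMeta` wrappers instead of raw values:
```kotlin
import space.kscience.dataforge.data.*

// Lazily sum a collection of Data<Double>; nothing is computed until the result is awaited.
fun lazySum(parts: Collection<Data<Double>>): Data<Double> =
    parts.reduceToData { values -> values.sumOf { it.value } }

// Lazily fold to a maximum, ignoring the attached meta.
fun lazyMax(parts: Iterable<Data<Double>>): Data<Double> =
    parts.foldToData(Double.NEGATIVE_INFINITY) { acc, datum -> maxOf(acc, datum.value) }
```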
//DataSet operations
@ -141,41 +183,39 @@ public suspend fun <T : Any, R : Any> DataSet<T>.map(
outputType: KType,
coroutineContext: CoroutineContext = EmptyCoroutineContext,
metaTransform: MutableMeta.() -> Unit = {},
block: suspend (T) -> R,
block: suspend (NamedValueWithMeta<T>) -> R,
): DataTree<R> = DataTree<R>(outputType) {
populate(
flow().map {
val newMeta = it.meta.toMutableMeta().apply(metaTransform).seal()
Data(outputType, newMeta, coroutineContext, listOf(it)) {
block(it.await())
}.named(it.name)
forEach {
val newMeta = it.meta.toMutableMeta().apply(metaTransform).seal()
val d = Data(outputType, newMeta, coroutineContext, listOf(it)) {
block(it.awaitWithMeta())
}
)
data(it.name, d)
}
}
@OptIn(DFInternal::class)
public suspend inline fun <T : Any, reified R : Any> DataSet<T>.map(
coroutineContext: CoroutineContext = EmptyCoroutineContext,
noinline metaTransform: MutableMeta.() -> Unit = {},
noinline block: suspend (T) -> R,
noinline block: suspend (NamedValueWithMeta<T>) -> R,
): DataTree<R> = map(typeOf<R>(), coroutineContext, metaTransform, block)
public suspend fun <T : Any> DataSet<T>.forEach(block: suspend (NamedData<T>) -> Unit) {
contract { callsInPlace(block, InvocationKind.EXACTLY_ONCE) }
flow().collect {
block(it)
public inline fun <T : Any> DataSet<T>.forEach(block: (NamedData<T>) -> Unit) {
for (d in this) {
block(d)
}
}
public suspend inline fun <T : Any, reified R : Any> DataSet<T>.reduceToData(
public inline fun <T : Any, reified R : Any> DataSet<T>.reduceToData(
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
noinline transformation: suspend (Flow<NamedData<T>>) -> R,
): Data<R> = flow().reduceToData(coroutineContext, meta, transformation)
crossinline transformation: suspend (Iterable<NamedValueWithMeta<T>>) -> R,
): Data<R> = asIterable().reduceNamedToData(coroutineContext, meta, transformation)
public suspend inline fun <T : Any, reified R : Any> DataSet<T>.foldToData(
public inline fun <T : Any, reified R : Any> DataSet<T>.foldToData(
initial: R,
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
noinline block: suspend (result: R, data: NamedData<T>) -> R,
): Data<R> = flow().foldToData(initial, coroutineContext, meta, block)
crossinline block: suspend (result: R, data: NamedValueWithMeta<T>) -> R,
): Data<R> = asIterable().foldNamedToData(initial, coroutineContext, meta, block)
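A sketch tying the DataSet operations above together (the names are illustrative):
```kotlin
import space.kscience.dataforge.data.*

// Square every item of a set, then lazily reduce the derived tree to an average.
suspend fun meanOfSquares(set: DataSet<Double>): Double {
    val squared: DataTree<Double> = set.map { it.value * it.value }
    return squared.reduceToData { items -> items.map { it.value }.average() }.await()
}
```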

View File

@ -0,0 +1,2 @@
package space.kscience.dataforge.data

View File

@ -0,0 +1,85 @@
package space.kscience.dataforge.data
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.filter
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.Name
import kotlin.coroutines.CoroutineContext
import kotlin.coroutines.EmptyCoroutineContext
import kotlin.reflect.KType
import kotlin.reflect.full.isSubtypeOf
import kotlin.reflect.typeOf
/**
* Cast the node to given type if the cast is possible or return null
*/
@Suppress("UNCHECKED_CAST")
private fun <R : Any> Data<*>.castOrNull(type: KType): Data<R>? =
if (!this.type.isSubtypeOf(type)) {
null
} else {
object : Data<R> by (this as Data<R>) {
override val type: KType = type
}
}
/**
* Select all data matching given type and filters. Does not modify paths
*
* @param predicate additional filtering condition based on item name and meta. By default, accepts all
*/
@OptIn(DFExperimental::class)
public fun <R : Any> DataSet<*>.filterByType(
type: KType,
predicate: (name: Name, meta: Meta) -> Boolean = { _, _ -> true },
): DataSource<R> = object : DataSource<R> {
override val dataType = type
override val coroutineContext: CoroutineContext
get() = (this@filterByType as? DataSource)?.coroutineContext ?: EmptyCoroutineContext
override val meta: Meta get() = this@filterByType.meta
private fun checkDatum(name: Name, datum: Data<*>): Boolean = datum.type.isSubtypeOf(type)
&& predicate(name, datum.meta)
override fun iterator(): Iterator<NamedData<R>> = iterator {
for(d in this@filterByType){
if(checkDatum(d.name,d.data)){
@Suppress("UNCHECKED_CAST")
yield(d as NamedData<R>)
}
}
}
override fun get(name: Name): Data<R>? = this@filterByType[name]?.let { datum ->
if (checkDatum(name, datum)) datum.castOrNull(type) else null
}
override val updates: Flow<Name> = this@filterByType.updates.filter { name ->
get(name)?.let { datum ->
checkDatum(name, datum)
} ?: false
}
}
/**
* Select all data of the given reified type. Does not modify paths
*/
public inline fun <reified R : Any> DataSet<*>.filterByType(
noinline predicate: (name: Name, meta: Meta) -> Boolean = { _, _ -> true },
): DataSet<R> = filterByType(typeOf<R>(), predicate)
/**
* Select a single datum if it is present and of given [type]
*/
public fun <R : Any> DataSet<*>.getByType(type: KType, name: Name): NamedData<R>? =
get(name)?.castOrNull<R>(type)?.named(name)
public inline fun <reified R : Any> DataSet<*>.getByType(name: Name): NamedData<R>? =
this@getByType.getByType(typeOf<R>(), name)
public inline fun <reified R : Any> DataSet<*>.getByType(name: String): NamedData<R>? =
this@getByType.getByType(typeOf<R>(), Name.parse(name))
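A usage sketch for the new selectors (the set and the item names are hypothetical):
```kotlin
import space.kscience.dataforge.data.*

fun inspect(mixed: DataSet<*>) {
    // The Double-typed slice of a heterogeneous set, service items excluded.
    val doubles: DataSet<Double> = mixed.filterByType { name, _ -> !name.toString().startsWith("@") }
    doubles.forEach { println(it.name) }
    // A single item, checked against the requested type at runtime.
    val offset: NamedData<Double>? = mixed.getByType("calibration.offset")
    println("offset present: ${offset != null}")
}
```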

View File

@ -0,0 +1,40 @@
package space.kscience.dataforge.data
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Job
import kotlinx.coroutines.launch
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.plus
/**
* Append data to node
*/
context(DataSetBuilder<T>) public infix fun <T : Any> String.put(data: Data<T>): Unit =
data(Name.parse(this), data)
/**
* Append node
*/
context(DataSetBuilder<T>) public infix fun <T : Any> String.put(dataSet: DataSet<T>): Unit =
node(Name.parse(this), dataSet)
/**
* Build and append node
*/
context(DataSetBuilder<T>) public infix fun <T : Any> String.put(
block: DataSetBuilder<T>.() -> Unit,
): Unit = node(Name.parse(this), block)
/**
* Copy the given data set and mirror its changes to this [DataTreeBuilder] in [this@setAndWatch]. Returns an update [Job]
*/
context(DataSetBuilder<T>) public fun <T : Any> CoroutineScope.setAndWatch(
name: Name,
dataSet: DataSet<T>,
): Job = launch {
node(name, dataSet)
dataSet.updates.collect { nameInBranch ->
data(name + nameInBranch, dataSet.get(nameInBranch))
}
}
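A sketch of the context-receiver helpers above (it needs the `-Xcontext-receivers` compiler flag; `Data.static` and the builder being its own `CoroutineScope` are assumptions based on the rest of this changeset):
```kotlin
import kotlinx.coroutines.CoroutineScope
import space.kscience.dataforge.data.*
import space.kscience.dataforge.names.asName

// Build a live tree that carries one constant and mirrors every change of `live`
// under the "mirror" branch.
fun CoroutineScope.mirror(live: DataSet<Int>): DataTree<Int> = DataSource(this) {
    "answer" put Data.static(42)          // string-keyed put from the helpers above
    setAndWatch("mirror".asName(), live)  // keeps following `live.updates`
}
```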

View File

@ -1,63 +0,0 @@
package space.kscience.dataforge.data
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.filter
import kotlinx.coroutines.flow.map
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.matches
import kotlin.reflect.KType
import kotlin.reflect.full.isSubtypeOf
import kotlin.reflect.typeOf
/**
* Cast the node to given type if the cast is possible or return null
*/
@Suppress("UNCHECKED_CAST")
private fun <R : Any> Data<*>.castOrNull(type: KType): Data<R>? =
if (!this.type.isSubtypeOf(type)) null else object : Data<R> by (this as Data<R>) {
override val type: KType = type
}
/**
* Select all data matching given type and filters. Does not modify paths
*/
@OptIn(DFExperimental::class)
@PublishedApi
internal fun <R : Any> DataSet<*>.select(
type: KType,
namePattern: Name? = null,
): ActiveDataSet<R> = object : ActiveDataSet<R> {
override val dataType = type
override fun flow(): Flow<NamedData<R>> = this@select.flow().filter { datum ->
datum.type.isSubtypeOf(type) && (namePattern == null || datum.name.matches(namePattern))
}.map {
@Suppress("UNCHECKED_CAST")
it as NamedData<R>
}
override suspend fun getData(name: Name): Data<R>? = this@select.getData(name)?.castOrNull(type)
override val updates: Flow<Name> = this@select.updates.filter {
val datum = this@select.getData(it)
datum?.type?.isSubtypeOf(type) ?: false
}
}
/**
* Select a single datum of the appropriate type
*/
public inline fun <reified R : Any> DataSet<*>.select(namePattern: Name? = null): DataSet<R> =
select(typeOf<R>(), namePattern)
public suspend fun <R : Any> DataSet<*>.selectOne(type: KType, name: Name): NamedData<R>? =
getData(name)?.castOrNull<R>(type)?.named(name)
public suspend inline fun <reified R : Any> DataSet<*>.selectOne(name: Name): NamedData<R>? = selectOne(typeOf<R>(), name)
public suspend inline fun <reified R : Any> DataSet<*>.selectOne(name: String): NamedData<R>? =
selectOne(typeOf<R>(), Name.parse(name))

View File

@ -1,42 +1,50 @@
package space.kscience.dataforge.data
import kotlinx.coroutines.runBlocking
import kotlinx.coroutines.ExperimentalCoroutinesApi
import kotlinx.coroutines.delay
import kotlinx.coroutines.test.runTest
import org.junit.jupiter.api.Test
import space.kscience.dataforge.actions.Action
import space.kscience.dataforge.actions.invoke
import space.kscience.dataforge.actions.map
import space.kscience.dataforge.misc.DFExperimental
import kotlin.test.assertEquals
@Suppress("EXPERIMENTAL_API_USAGE")
class ActionsTest {
val data: DataTree<Int> = runBlocking {
DataTree {
@OptIn(DFExperimental::class, ExperimentalCoroutinesApi::class)
internal class ActionsTest {
@Test
fun testStaticMapAction() = runTest {
val data: DataTree<Int> = DataTree {
repeat(10) {
static(it.toString(), it)
}
}
}
@Test
fun testStaticMapAction() {
val plusOne = Action.map<Int, Int> {
result { it + 1 }
}
runBlocking {
val result = plusOne.execute(data)
assertEquals(2, result.getData("1")?.await())
}
val result = plusOne(data)
assertEquals(2, result["1"]?.await())
}
@Test
fun testDynamicMapAction() {
fun testDynamicMapAction() = runTest {
val data: DataSourceBuilder<Int> = DataSource()
val plusOne = Action.map<Int, Int> {
result { it + 1 }
}
val datum = runBlocking {
val result = plusOne.execute(data, scope = this)
result.getData("1")?.await()
val result = plusOne(data)
repeat(10) {
data.static(it.toString(), it)
}
assertEquals(2, datum)
delay(20)
assertEquals(2, result["1"]?.await())
data.close()
}
}

View File

@ -1,7 +1,6 @@
package space.kscience.dataforge.data
import kotlinx.coroutines.*
import kotlinx.coroutines.flow.collect
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.asName
import kotlin.test.Test
@ -20,10 +19,10 @@ internal class DataTreeBuilderTest {
static("c.f", "c.f")
}
runBlocking {
assertEquals("a", node.getData("primary.a")?.await())
assertEquals("b", node.getData("primary.b")?.await())
assertEquals("c.d", node.getData("c.d")?.await())
assertEquals("c.f", node.getData("c.f")?.await())
assertEquals("a", node["primary.a"]?.await())
assertEquals("b", node["primary.b"]?.await())
assertEquals("c.d", node["c.d"]?.await())
assertEquals("c.f", node["c.f"]?.await())
}
}
@ -43,12 +42,12 @@ internal class DataTreeBuilderTest {
static("b", "b")
}
static("root", "root")
populate(updateData)
populateFrom(updateData)
}
runBlocking {
assertEquals("a", node.getData("update.a")?.await())
assertEquals("a", node.getData("primary.a")?.await())
assertEquals("a", node["update.a"]?.await())
assertEquals("a", node["primary.a"]?.await())
}
}
@ -57,7 +56,7 @@ internal class DataTreeBuilderTest {
try {
lateinit var updateJob: Job
supervisorScope {
val subNode = ActiveDataTree<Int> {
val subNode = DataSource<Int> {
updateJob = launch {
repeat(10) {
delay(10)
@ -71,8 +70,8 @@ internal class DataTreeBuilderTest {
println(it)
}
}
val rootNode = ActiveDataTree<Int> {
setAndObserve("sub".asName(), subNode)
val rootNode = DataSource<Int> {
setAndWatch("sub".asName(), subNode)
}
launch {
@ -81,11 +80,11 @@ internal class DataTreeBuilderTest {
}
}
updateJob.join()
assertEquals(9, rootNode.getData("sub.value")?.await())
assertEquals(9, rootNode["sub.value"]?.await())
cancel()
}
} catch (t: Throwable) {
if (t !is CancellationException) throw t
if (t !is CancellationException) throw t
}
}

dataforge-io/README.md (new file, 23 lines)
View File

@ -0,0 +1,23 @@
# Module dataforge-io
IO module
## Usage
## Artifact:
The Maven coordinates of this project are `space.kscience:dataforge-io:0.7.0`.
**Gradle Kotlin DSL:**
```kotlin
repositories {
maven("https://repo.kotlin.link")
//uncomment to access development builds
//maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
mavenCentral()
}
dependencies {
implementation("space.kscience:dataforge-io:0.7.0")
}
```

View File

@ -1,29 +1,26 @@
plugins {
id("ru.mipt.npm.gradle.mpp")
id("ru.mipt.npm.gradle.native")
id("space.kscience.gradle.mpp")
}
description = "IO module"
val ioVersion = "0.2.1"
kscience {
useSerialization(sourceSet = ru.mipt.npm.gradle.DependencySourceSet.TEST) {
jvm()
js()
native()
useSerialization()
useSerialization(sourceSet = space.kscience.gradle.DependencySourceSet.TEST) {
cbor()
}
}
//val ioVersion by rootProject.extra("0.2.0-npm-dev-11")
kotlin {
sourceSets {
commonMain {
dependencies {
api(project(":dataforge-context"))
api("io.ktor:ktor-io:${ru.mipt.npm.gradle.KScienceVersions.ktorVersion}")
}
}
dependencies {
api(projects.dataforgeContext)
api("org.jetbrains.kotlinx:kotlinx-io-core:$ioVersion")
api("org.jetbrains.kotlinx:kotlinx-io-bytestring:$ioVersion")
}
}
readme{
maturity = ru.mipt.npm.gradle.Maturity.PROTOTYPE
maturity = space.kscience.gradle.Maturity.EXPERIMENTAL
}

View File

@ -0,0 +1,23 @@
# Module dataforge-io-yaml
YAML meta IO
## Usage
## Artifact:
The Maven coordinates of this project are `space.kscience:dataforge-io-yaml:0.7.0`.
**Gradle Kotlin DSL:**
```kotlin
repositories {
maven("https://repo.kotlin.link")
//uncomment to access development builds
//maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
mavenCentral()
}
dependencies {
implementation("space.kscience:dataforge-io-yaml:0.7.0")
}
```

View File

@ -1,32 +1,23 @@
plugins {
id("ru.mipt.npm.gradle.mpp")
// id("ru.mipt.npm.gradle.native")
id("space.kscience.gradle.mpp")
}
description = "YAML meta IO"
kscience {
useSerialization{
yamlKt("0.9.0-dev-1")
jvm()
js()
native()
dependencies {
api(projects.dataforgeIo)
}
}
repositories{
maven("https://dl.bintray.com/mamoe/yamlkt")
}
kotlin {
sourceSets {
commonMain{
dependencies {
api(project(":dataforge-io"))
}
}
useSerialization{
yamlKt()
}
}
readme{
maturity = ru.mipt.npm.gradle.Maturity.PROTOTYPE
maturity = space.kscience.gradle.Maturity.PROTOTYPE
description ="""
YAML meta converters and Front Matter envelope format
""".trimIndent()

View File

@ -1,123 +1,97 @@
package space.kscience.dataforge.io.yaml
import io.ktor.utils.io.core.Input
import io.ktor.utils.io.core.Output
import io.ktor.utils.io.core.readBytes
import io.ktor.utils.io.core.readUTF8Line
import kotlinx.io.Sink
import kotlinx.io.Source
import kotlinx.io.bytestring.ByteString
import kotlinx.io.bytestring.encodeToByteString
import kotlinx.io.readByteString
import kotlinx.io.writeString
import space.kscience.dataforge.context.Context
import space.kscience.dataforge.context.Global
import space.kscience.dataforge.io.*
import space.kscience.dataforge.io.IOFormat.Companion.META_KEY
import space.kscience.dataforge.io.IOFormat.Companion.NAME_KEY
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.plus
@DFExperimental
public class FrontMatterEnvelopeFormat(
private val io: IOPlugin,
private val meta: Meta = Meta.EMPTY,
private val metaFormatFactory: MetaFormatFactory = YamlMetaFormat,
) : EnvelopeFormat {
override fun readPartial(input: Input): PartialEnvelope {
var line: String
var offset = 0u
do {
line = input.readUTF8Line() ?: error("Input does not contain front matter separator")
offset += line.encodeToByteArray().size.toUInt()
} while (!line.startsWith(space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.Companion.SEPARATOR))
override fun readFrom(binary: Binary): Envelope = binary.read {
var offset = 0
val readMetaFormat =
space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.Companion.metaTypeRegex.matchEntire(line)?.groupValues?.first()
?.let { io.resolveMetaFormat(it) } ?: space.kscience.dataforge.io.yaml.YamlMetaFormat
offset += discardWithSeparator(
SEPARATOR,
atMost = 1024,
)
//TODO replace by preview
val meta = Binary {
do {
line = input.readSafeUtf8Line()
writeUtf8String(line + "\r\n")
offset += line.encodeToByteArray().size.toUInt()
} while (!line.startsWith(space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.Companion.SEPARATOR))
}.read {
readMetaFormat.readMeta(input)
val line = ByteArray {
offset += readWithSeparatorTo(this, "\n".encodeToByteString())
}.decodeToString()
val readMetaFormat = line.trim().takeIf { it.isNotBlank() }?.let { io.resolveMetaFormat(it) } ?: YamlMetaFormat
val packet = ByteArray {
offset += readWithSeparatorTo(this, SEPARATOR)
}
return PartialEnvelope(meta, offset, null)
offset += discardLine()
val meta = readMetaFormat.readFrom(packet.asBinary())
Envelope(meta, binary.view(offset))
}
override fun readObject(input: Input): Envelope {
var line: String
do {
line = input.readSafeUtf8Line() //?: error("Input does not contain front matter separator")
} while (!line.startsWith(space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.Companion.SEPARATOR))
override fun readFrom(source: Source): Envelope = readFrom(source.readBinary())
val readMetaFormat =
space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.Companion.metaTypeRegex.matchEntire(line)?.groupValues?.first()
?.let { io.resolveMetaFormat(it) } ?: space.kscience.dataforge.io.yaml.YamlMetaFormat
val meta = Binary {
do {
writeUtf8String(input.readSafeUtf8Line() + "\r\n")
} while (!line.startsWith(space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.Companion.SEPARATOR))
}.read {
readMetaFormat.readMeta(input)
}
val bytes = input.readBytes()
val data = bytes.asBinary()
return SimpleEnvelope(meta, data)
}
override fun writeEnvelope(
output: Output,
envelope: Envelope,
metaFormatFactory: MetaFormatFactory,
formatMeta: Meta,
override fun writeTo(
sink: Sink,
obj: Envelope,
) {
val metaFormat = metaFormatFactory(formatMeta, this@FrontMatterEnvelopeFormat.io.context)
output.writeRawString("${space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.Companion.SEPARATOR}\r\n")
metaFormat.run { this.writeObject(output, envelope.meta) }
output.writeRawString("${space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.Companion.SEPARATOR}\r\n")
val metaFormat = metaFormatFactory.build(io.context, meta)
val formatSuffix = if (metaFormat is YamlMetaFormat) "" else metaFormatFactory.shortName
sink.writeString("$SEPARATOR${formatSuffix}\r\n")
metaFormat.run { metaFormat.writeTo(sink, obj.meta) }
sink.writeString("$SEPARATOR\r\n")
//Printing data
envelope.data?.let { data ->
output.writeBinary(data)
obj.data?.let { data ->
sink.writeBinary(data)
}
}
override fun toMeta(): Meta = Meta {
NAME_KEY put name.toString()
META_KEY put meta
}
public companion object : EnvelopeFormatFactory {
public const val SEPARATOR: String = "---"
public val SEPARATOR: ByteString = "---".encodeToByteString()
private val metaTypeRegex = "---(\\w*)\\s*".toRegex()
override fun invoke(meta: Meta, context: Context): EnvelopeFormat {
return space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat(context.io, meta)
override val name: Name = EnvelopeFormatFactory.ENVELOPE_FACTORY_NAME + "frontMatter"
override fun build(context: Context, meta: Meta): EnvelopeFormat {
return FrontMatterEnvelopeFormat(context.io, meta)
}
override fun peekFormat(io: IOPlugin, binary: Binary): EnvelopeFormat? = binary.read {
val line = readSafeUtf8Line()
return@read if (line.startsWith("---")) {
space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.Companion.invoke()
//read raw string to avoid UTF issues
val line = readByteString(3)
return@read if (line == "---".encodeToByteString()) {
default
} else {
null
}
}
private val default by lazy { space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.Companion.invoke() }
private val default by lazy { build(Global, Meta.EMPTY) }
override fun readPartial(input: Input): PartialEnvelope =
space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.Companion.default.readPartial(input)
override fun readFrom(binary: Binary): Envelope = default.readFrom(binary)
override fun writeEnvelope(
output: Output,
envelope: Envelope,
metaFormatFactory: MetaFormatFactory,
formatMeta: Meta,
): Unit = space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.Companion.default.writeEnvelope(output, envelope, metaFormatFactory, formatMeta)
override fun writeTo(
sink: Sink,
obj: Envelope,
): Unit = default.writeTo(sink, obj)
override fun readObject(input: Input): Envelope = space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.Companion.default.readObject(input)
override fun readFrom(source: Source): Envelope = default.readFrom(source)
}
}

View File

@ -1,26 +1,18 @@
package space.kscience.dataforge.io.yaml
import io.ktor.utils.io.core.Input
import io.ktor.utils.io.core.Output
import kotlinx.io.Sink
import kotlinx.io.Source
import kotlinx.io.readString
import kotlinx.io.writeString
import net.mamoe.yamlkt.*
import space.kscience.dataforge.context.Context
import space.kscience.dataforge.io.IOFormat.Companion.META_KEY
import space.kscience.dataforge.io.IOFormat.Companion.NAME_KEY
import space.kscience.dataforge.io.MetaFormat
import space.kscience.dataforge.io.MetaFormatFactory
import space.kscience.dataforge.io.readUtf8String
import space.kscience.dataforge.io.writeUtf8String
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.*
import space.kscience.dataforge.meta.descriptors.MetaDescriptor
import space.kscience.dataforge.meta.descriptors.get
import space.kscience.dataforge.meta.isLeaf
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.NameToken
import space.kscience.dataforge.names.withIndex
import space.kscience.dataforge.values.ListValue
import space.kscience.dataforge.values.Null
import space.kscience.dataforge.values.Value
import space.kscience.dataforge.values.parseValue
import kotlin.collections.component1
import kotlin.collections.component2
import kotlin.collections.set
@ -40,7 +32,7 @@ public fun Meta.toYaml(): YamlMap {
private class YamlMeta(private val yamlMap: YamlMap, private val descriptor: MetaDescriptor? = null) : Meta {
override val value: Value?
get() = yamlMap.getStringOrNull(null)?.parseValue()
get() = yamlMap.getStringOrNull(null)?.let { Value.parse(it) }
private fun buildItems(): Map<NameToken, Meta> {
val map = LinkedHashMap<NameToken, Meta>()
@ -51,13 +43,13 @@ private class YamlMeta(private val yamlMap: YamlMap, private val descriptor: Met
val token = NameToken(stringKey)
when (value) {
YamlNull -> Meta(Null)
is YamlLiteral -> map[token] = Meta(value.content.parseValue())
is YamlLiteral -> map[token] = Meta(Value.parse(value.content))
is YamlMap -> map[token] = value.toMeta()
is YamlList -> if (value.all { it is YamlLiteral }) {
val listValue = ListValue(
value.map {
//We already checked that all values are primitives
(it as YamlLiteral).content.parseValue()
Value.parse((it as YamlLiteral).content)
}
)
map[token] = Meta(listValue)
@ -83,7 +75,7 @@ private class YamlMeta(private val yamlMap: YamlMap, private val descriptor: Met
public fun YamlElement.toMeta(descriptor: MetaDescriptor? = null): Meta = when (this) {
YamlNull -> Meta(Null)
is YamlLiteral -> Meta(content.parseValue())
is YamlLiteral -> Meta(Value.parse(content))
is YamlMap -> toMeta()
//We can't return multiple items therefore we create top level node
is YamlList -> YamlMap(mapOf("@yamlArray" to this)).toMeta(descriptor)
@ -95,38 +87,32 @@ public fun YamlMap.toMeta(): Meta = YamlMeta(this)
/**
* Represent meta as Yaml
*/
@DFExperimental
public class YamlMetaFormat(private val meta: Meta) : MetaFormat {
override fun writeMeta(output: Output, meta: Meta, descriptor: MetaDescriptor?) {
val yaml = meta.toYaml()
val string = Yaml.encodeToString(yaml)
output.writeUtf8String(string)
override fun writeMeta(sink: Sink, meta: Meta, descriptor: MetaDescriptor?) {
val yaml: YamlMap = meta.toYaml()
val string = Yaml.encodeToString(YamlMap.serializer(), yaml)
sink.writeString(string)
}
override fun readMeta(input: Input, descriptor: MetaDescriptor?): Meta {
val yaml = Yaml.decodeYamlMapFromString(input.readUtf8String())
override fun readMeta(source: Source, descriptor: MetaDescriptor?): Meta {
val yaml = Yaml.decodeYamlMapFromString(source.readString())
return yaml.toMeta()
}
override fun toMeta(): Meta = Meta {
NAME_KEY put space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.name.toString()
META_KEY put meta
}
public companion object : MetaFormatFactory {
override fun invoke(meta: Meta, context: Context): MetaFormat = YamlMetaFormat(meta)
override fun build(context: Context, meta: Meta): MetaFormat = YamlMetaFormat(meta)
override val shortName: String = "yaml"
override val key: Short = 0x594d //YM
private val default = YamlMetaFormat()
private val default = YamlMetaFormat(Meta.EMPTY)
override fun writeMeta(output: Output, meta: Meta, descriptor: MetaDescriptor?): Unit =
default.writeMeta(output, meta, descriptor)
override fun writeMeta(sink: Sink, meta: Meta, descriptor: MetaDescriptor?): Unit =
default.writeMeta(sink, meta, descriptor)
override fun readMeta(input: Input, descriptor: MetaDescriptor?): Meta =
default.readMeta(input, descriptor)
override fun readMeta(source: Source, descriptor: MetaDescriptor?): Meta =
default.readMeta(source, descriptor)
}
}
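A round-trip sketch for the refactored format; a `kotlinx.io.Buffer` is both a `Sink` and a `Source`, and the descriptor is passed explicitly to avoid assuming default arguments:
```kotlin
import kotlinx.io.Buffer
import space.kscience.dataforge.io.yaml.YamlMetaFormat
import space.kscience.dataforge.meta.Meta

fun yamlRoundTrip(meta: Meta): Meta {
    val buffer = Buffer()
    YamlMetaFormat.writeMeta(buffer, meta, descriptor = null) // the companion delegates to a default instance
    return YamlMetaFormat.readMeta(buffer, descriptor = null)
}
```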

View File

@ -0,0 +1,30 @@
package space.kscience.dataforge.io.yaml
import space.kscience.dataforge.context.AbstractPlugin
import space.kscience.dataforge.context.Context
import space.kscience.dataforge.context.PluginFactory
import space.kscience.dataforge.context.PluginTag
import space.kscience.dataforge.io.EnvelopeFormatFactory
import space.kscience.dataforge.io.IOPlugin
import space.kscience.dataforge.io.MetaFormatFactory
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.asName
public class YamlPlugin(meta: Meta) : AbstractPlugin(meta) {
public val io: IOPlugin by require(IOPlugin)
override val tag: PluginTag get() = Companion.tag
override fun content(target: String): Map<Name, Any> = when (target) {
MetaFormatFactory.META_FORMAT_TYPE -> mapOf("yaml".asName() to YamlMetaFormat)
EnvelopeFormatFactory.ENVELOPE_FORMAT_TYPE -> mapOf(FrontMatterEnvelopeFormat.name to FrontMatterEnvelopeFormat)
else -> super.content(target)
}
public companion object : PluginFactory<YamlPlugin> {
override val tag: PluginTag = PluginTag("io.yaml", group = PluginTag.DATAFORGE_GROUP)
override fun build(context: Context, meta: Meta): YamlPlugin = YamlPlugin(meta)
}
}

View File

@ -0,0 +1,37 @@
@file:OptIn(DFExperimental::class)
package space.kscience.dataforge.io.yaml
import space.kscience.dataforge.context.Context
import space.kscience.dataforge.io.io
import space.kscience.dataforge.io.readEnvelope
import space.kscience.dataforge.io.toByteArray
import space.kscience.dataforge.meta.get
import space.kscience.dataforge.meta.string
import space.kscience.dataforge.misc.DFExperimental
import kotlin.test.Test
import kotlin.test.assertEquals
internal class FrontMatterEnvelopeFormatTest {
val context = Context {
plugin(YamlPlugin)
}
@Test
fun frontMatter(){
val text = """
---
content_type: magprog
magprog_section: contacts
section_title: Контакты
language: ru
---
Some text here
""".trimIndent()
val envelope = context.io.readEnvelope(text)
assertEquals("Some text here", envelope.data!!.toByteArray().decodeToString().trim())
assertEquals("magprog", envelope.meta["content_type"].string)
}
}

View File

@ -1,6 +1,9 @@
package space.kscience.dataforge.io
import io.ktor.utils.io.core.*
import kotlinx.io.Sink
import kotlinx.io.Source
import kotlinx.io.buffered
import kotlinx.io.readByteArray
import kotlin.math.min
/**
@ -12,11 +15,20 @@ public interface Binary {
public val size: Int
/**
* Read a maximum of [atMost] bytes as input from the binary, starting at [offset]. The generated input is always closed
* when leaving the scope, so it cannot leak outside the scope of [block].
*/
public fun <R> read(offset: Int = 0, atMost: Int = size - offset, block: Input.() -> R): R
public fun <R> read(offset: Int = 0, atMost: Int = size - offset, block: Source.() -> R): R
public suspend fun <R> readSuspend(offset: Int = 0, atMost: Int = size - offset, block: suspend Source.() -> R): R
/**
* Read a binary with given [offset] relative to this binary and given [binarySize].
* In general, the resulting binary is of the same type as this one, but it is not guaranteed.
*/
public fun view(offset: Int, binarySize: Int = size - offset): Binary
public companion object {
public val EMPTY: Binary = ByteArrayBinary(ByteArray(0))
@ -29,46 +41,67 @@ internal class ByteArrayBinary(
override val size: Int = array.size - start,
) : Binary {
override fun <R> read(offset: Int, atMost: Int, block: Input.() -> R): R {
override fun <R> read(offset: Int, atMost: Int, block: Source.() -> R): R {
require(offset >= 0) { "Offset must be positive" }
require(offset < array.size) { "Offset $offset is larger than array size" }
val input = ByteReadPacket(
return ByteArraySource(
array,
offset + start,
min(atMost, size - offset)
)
return input.use(block)
).buffered().use(block)
}
override suspend fun <R> readSuspend(offset: Int, atMost: Int, block: suspend Source.() -> R): R {
require(offset >= 0) { "Offset must be positive" }
require(offset < array.size) { "Offset $offset is larger than array size" }
val input = ByteArraySource(
array,
offset + start,
min(atMost, size - offset)
).buffered()
return try {
block(input)
} finally {
input.close()
}
}
override fun view(offset: Int, binarySize: Int): ByteArrayBinary =
ByteArrayBinary(array, start + offset, binarySize)
}
public fun ByteArray.asBinary(): Binary = ByteArrayBinary(this)
/**
* Produce a [buildByteArray] representing an exact copy of this [Binary]
* Produce a [ByteArray] representing an exact copy of this [Binary]
*/
public fun Binary.toByteArray(): ByteArray = if (this is ByteArrayBinary) {
array.copyOf() // TODO do we need to ensure data safety here?
array.copyOfRange(start, start + size) // TODO do we need to ensure data safety here?
} else {
read {
readBytes()
readByteArray()
}
}
public fun Input.readBinary(size: Int): Binary {
val array = readBytes(size)
//TODO optimize for file-based Inputs
public fun Source.readBinary(size: Int? = null): Binary {
val array = if (size == null) readByteArray() else readByteArray(size)
return ByteArrayBinary(array)
}
/**
* Direct write of binary to the output. Returns the number of bytes written
*/
public fun Output.writeBinary(binary: Binary): Int {
public fun Sink.writeBinary(binary: Binary): Int {
return if (binary is ByteArrayBinary) {
writeFully(binary.array, binary.start, binary.start + binary.size)
write(binary.array, binary.start, binary.start + binary.size)
binary.size
} else {
binary.read {
copyTo(this@writeBinary).toInt()
transferTo(this@writeBinary).toInt()
}
}
}
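A small sketch of the kotlinx-io based `Binary` API shown above:
```kotlin
import kotlinx.io.readString
import space.kscience.dataforge.io.asBinary

fun binarySketch() {
    val binary = "header:payload".encodeToByteArray().asBinary()
    val payload = binary.view(offset = 7)     // zero-copy slice holding "payload"
    val text = payload.read { readString() }  // kotlinx-io Source.readString
    println(text)                             // prints "payload"
}
```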

View File

@ -34,7 +34,9 @@ public interface Envelope {
}
}
public class SimpleEnvelope(override val meta: Meta, override val data: Binary?) : Envelope
internal class SimpleEnvelope(override val meta: Meta, override val data: Binary?) : Envelope
public fun Envelope(meta: Meta, data: Binary?): Envelope = SimpleEnvelope(meta, data)
/**
* The purpose of the envelope

View File

@ -1,6 +1,6 @@
package space.kscience.dataforge.io
import io.ktor.utils.io.core.Output
import kotlinx.io.Sink
import space.kscience.dataforge.meta.*
public class EnvelopeBuilder : Envelope {
@ -33,8 +33,8 @@ public class EnvelopeBuilder : Envelope {
/**
* Construct a data binary from given builder
*/
public fun data(block: Output.() -> Unit) {
data = buildByteArray { block() }.asBinary()
public inline fun data(block: Sink.() -> Unit) {
data = ByteArray { block() }.asBinary()
}
public fun seal(): Envelope = SimpleEnvelope(metaBuilder.seal(), data)

View File

@ -1,48 +1,27 @@
package space.kscience.dataforge.io
import io.ktor.utils.io.core.Input
import io.ktor.utils.io.core.Output
import kotlinx.io.Source
import space.kscience.dataforge.context.Context
import space.kscience.dataforge.io.EnvelopeFormatFactory.Companion.ENVELOPE_FORMAT_TYPE
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.misc.Type
import space.kscience.dataforge.misc.DfId
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.asName
import kotlin.reflect.KType
import kotlin.reflect.typeOf
/**
* A partially read envelope with meta, but without data
*/
public data class PartialEnvelope(val meta: Meta, val dataOffset: UInt, val dataSize: ULong?)
public interface EnvelopeFormat : IOFormat<Envelope> {
override val type: KType get() = typeOf<Envelope>()
public val defaultMetaFormat: MetaFormatFactory get() = JsonMetaFormat
public fun readPartial(input: Input): PartialEnvelope
public fun writeEnvelope(
output: Output,
envelope: Envelope,
metaFormatFactory: MetaFormatFactory = defaultMetaFormat,
formatMeta: Meta = Meta.EMPTY,
)
override fun readObject(input: Input): Envelope
override fun writeObject(output: Output, obj: Envelope): Unit = writeEnvelope(output, obj)
}
public fun EnvelopeFormat.read(input: Input): Envelope = readObject(input)
public fun EnvelopeFormat.read(input: Source): Envelope = readFrom(input)
@Type(ENVELOPE_FORMAT_TYPE)
@DfId(ENVELOPE_FORMAT_TYPE)
public interface EnvelopeFormatFactory : IOFormatFactory<Envelope>, EnvelopeFormat {
override val name: Name get() = "envelope".asName()
override val type: KType get() = typeOf<Envelope>()
override fun invoke(meta: Meta, context: Context): EnvelopeFormat
override fun build(context: Context, meta: Meta): EnvelopeFormat
/**
* Try to infer the specific format from the input and return null if the attempt fails.
@ -51,6 +30,7 @@ public interface EnvelopeFormatFactory : IOFormatFactory<Envelope>, EnvelopeForm
public fun peekFormat(io: IOPlugin, binary: Binary): EnvelopeFormat?
public companion object {
public val ENVELOPE_FACTORY_NAME: Name = "envelope".asName()
public const val ENVELOPE_FORMAT_TYPE: String = "io.format.envelope"
}
}

View File

@ -1,11 +1,13 @@
package space.kscience.dataforge.io
import kotlinx.io.bytestring.ByteString
import kotlinx.io.bytestring.decodeToString
import kotlinx.io.write
import space.kscience.dataforge.io.Envelope.Companion.ENVELOPE_NODE_KEY
import space.kscience.dataforge.io.PartDescriptor.Companion.DEFAULT_MULTIPART_DATA_SEPARATOR
import space.kscience.dataforge.io.PartDescriptor.Companion.MULTIPART_DATA_TYPE
import space.kscience.dataforge.io.PartDescriptor.Companion.MULTIPART_KEY
import space.kscience.dataforge.io.PartDescriptor.Companion.PARTS_KEY
import space.kscience.dataforge.io.PartDescriptor.Companion.PART_FORMAT_KEY
import space.kscience.dataforge.io.PartDescriptor.Companion.SEPARATOR_KEY
import space.kscience.dataforge.meta.*
import space.kscience.dataforge.names.asName
@ -21,9 +23,7 @@ private class PartDescriptor : Scheme() {
val PARTS_KEY = MULTIPART_KEY + "parts"
val SEPARATOR_KEY = MULTIPART_KEY + "separator"
const val DEFAULT_MULTIPART_DATA_SEPARATOR = "\r\n#~PART~#\r\n"
val PART_FORMAT_KEY = "format".asName()
val DEFAULT_MULTIPART_DATA_SEPARATOR = "\r\n#~PART~#\r\n".toAsciiByteString()
const val MULTIPART_DATA_TYPE = "envelope.multipart"
}
@ -35,12 +35,12 @@ public typealias EnvelopeParts = List<EnvelopePart>
public fun EnvelopeBuilder.multipart(
parts: EnvelopeParts,
separator: String = DEFAULT_MULTIPART_DATA_SEPARATOR
separator: ByteString = DEFAULT_MULTIPART_DATA_SEPARATOR,
) {
dataType = MULTIPART_DATA_TYPE
var offsetCounter = 0
val separatorSize = separator.length
val separatorSize = separator.size
val partDescriptors = parts.map { (binary, description) ->
offsetCounter += separatorSize
PartDescriptor {
@ -54,31 +54,30 @@ public fun EnvelopeBuilder.multipart(
meta {
if (separator != DEFAULT_MULTIPART_DATA_SEPARATOR) {
SEPARATOR_KEY put separator
SEPARATOR_KEY put separator.decodeToString()
}
setIndexed(PARTS_KEY, partDescriptors.map { it.toMeta() })
}
data {
parts.forEach {
writeRawString(separator)
write(separator)
writeBinary(it.binary)
}
}
}
/**
* Put a list of envelopes as parts of given envelope
*/
public fun EnvelopeBuilder.envelopes(
envelopes: List<Envelope>,
format: EnvelopeFormat = TaggedEnvelopeFormat,
separator: String = DEFAULT_MULTIPART_DATA_SEPARATOR
separator: ByteString = DEFAULT_MULTIPART_DATA_SEPARATOR,
) {
val parts = envelopes.map {
val binary = format.toBinary(it)
val binary = Binary(it, TaggedEnvelopeFormat)
EnvelopePart(binary, null)
}
meta{
set(MULTIPART_KEY + PART_FORMAT_KEY, format.toMeta())
}
multipart(parts, separator)
}
@ -106,14 +105,4 @@ public val EnvelopePart.name: String? get() = description?.get("name").string
/**
* Represent envelope part by an envelope
*/
public fun EnvelopePart.envelope(plugin: IOPlugin): Envelope {
val formatItem = description?.get(PART_FORMAT_KEY)
return if (formatItem != null) {
val format: EnvelopeFormat = plugin.resolveEnvelopeFormat(formatItem)
?: error("Envelope format for $formatItem is not resolved")
binary.readWith(format)
} else {
error("Envelope description not found")
//SimpleEnvelope(description ?: Meta.EMPTY, binary)
}
}
public fun EnvelopePart.envelope(): Envelope = binary.readWith(TaggedEnvelopeFormat)

View File

@ -1,133 +1,118 @@
package space.kscience.dataforge.io
import io.ktor.utils.io.core.*
import kotlinx.io.Sink
import kotlinx.io.Source
import kotlinx.io.readByteArray
import space.kscience.dataforge.context.Context
import space.kscience.dataforge.context.Factory
import space.kscience.dataforge.io.IOFormat.Companion.NAME_KEY
import space.kscience.dataforge.io.IOFormatFactory.Companion.IO_FORMAT_TYPE
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.MetaRepr
import space.kscience.dataforge.misc.DfId
import space.kscience.dataforge.misc.Named
import space.kscience.dataforge.misc.Type
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.asName
import kotlin.reflect.KType
import kotlin.reflect.typeOf
/**
* And interface for reading and writing objects into with IO streams
* Reader of a custom object from input
*/
public interface IOFormat<T : Any> : MetaRepr {
public interface IOReader<out T> {
/**
* The type of object being read
*/
public val type: KType
public fun writeObject(output: Output, obj: T)
public fun readObject(input: Input): T
public fun readFrom(source: Source): T
public fun readFrom(binary: Binary): T = binary.read { readFrom(this) }
public companion object {
public val NAME_KEY: Name = "name".asName()
public val META_KEY: Name = "meta".asName()
/**
* no-op reader for binaries.
*/
public val binary: IOReader<Binary> = object : IOReader<Binary> {
override val type: KType = typeOf<Binary>()
override fun readFrom(source: Source): Binary = source.readByteArray().asBinary()
override fun readFrom(binary: Binary): Binary = binary
}
}
}
public fun <T : Any> Input.readWith(format: IOFormat<T>): T = format.readObject(this@readWith)
public inline fun <reified T> IOReader(crossinline read: Source.() -> T): IOReader<T> = object : IOReader<T> {
override val type: KType = typeOf<T>()
public fun <T: Any> IOFormat<T>.readObject(binary: Binary): T = binary.read {
readObject(this)
override fun readFrom(source: Source): T = source.read()
}
public fun interface IOWriter<in T> {
public fun writeTo(sink: Sink, obj: T)
}
/**
* Read given binary as object using given format
* An interface for reading and writing objects with IO streams
*/
public fun <T : Any> Binary.readWith(format: IOFormat<T>): T = read {
public interface IOFormat<T> : IOReader<T>, IOWriter<T>
public fun <T : Any> Source.readWith(format: IOReader<T>): T = format.readFrom(this)
/**
* Read given binary as an object using given format
*/
public fun <T : Any> Binary.readWith(format: IOReader<T>): T = read {
readWith(format)
}
public fun <T : Any> Output.writeWith(format: IOFormat<T>, obj: T): Unit =
format.run { writeObject(this@writeWith, obj) }
/**
* Write an object to the [Sink] with given [format]
*/
public fun <T : Any> Sink.writeWith(format: IOWriter<T>, obj: T): Unit =
format.writeTo(this, obj)
public inline fun <reified T : Any> IOFormat.Companion.listOf(
format: IOFormat<T>,
): IOFormat<List<T>> = object : IOFormat<List<T>> {
override val type: KType = typeOf<List<T>>()
override fun writeObject(output: Output, obj: List<T>) {
output.writeInt(obj.size)
format.run {
obj.forEach {
writeObject(output, it)
}
}
}
override fun readObject(input: Input): List<T> {
val size = input.readInt()
return format.run {
List(size) { readObject(input) }
}
}
override fun toMeta(): Meta = Meta {
NAME_KEY put "list"
"contentFormat" put format.toMeta()
}
}
//public fun ObjectPool<Buffer>.fill(block: Buffer.() -> Unit): Buffer {
// val buffer = borrow()
// return try {
// buffer.apply(block)
// } catch (ex: Exception) {
// //recycle(buffer)
// throw ex
// }
//}
@Type(IO_FORMAT_TYPE)
public interface IOFormatFactory<T : Any> : Factory<IOFormat<T>>, Named, MetaRepr {
@DfId(IO_FORMAT_TYPE)
public interface IOFormatFactory<T : Any> : Factory<IOFormat<T>>, Named {
/**
* Explicit type for dynamic type checks
*/
public val type: KType
override fun toMeta(): Meta = Meta {
NAME_KEY put name.toString()
}
public companion object {
public const val IO_FORMAT_TYPE: String = "io.format"
public val NAME_KEY: Name = "name".asName()
public val META_KEY: Name = "meta".asName()
}
}
public fun <T : Any> IOFormat<T>.toBinary(obj: T): Binary = Binary { writeObject(this, obj) }
public fun <T : Any> Binary(obj: T, format: IOWriter<T>): Binary = Binary { format.writeTo(this, obj) }
public object FloatIOFormat : IOFormat<Float>, IOFormatFactory<Float> {
override fun build(context: Context, meta: Meta): IOFormat<Float> = this
override val name: Name = "float32".asName()
override val type: KType get() = typeOf<Float>()
override fun writeTo(sink: Sink, obj: Float) {
sink.writeFloat(obj)
}
override fun readFrom(source: Source): Float = source.readFloat()
}
public object DoubleIOFormat : IOFormat<Double>, IOFormatFactory<Double> {
override fun invoke(meta: Meta, context: Context): IOFormat<Double> = this
override fun build(context: Context, meta: Meta): IOFormat<Double> = this
override val name: Name = "double".asName()
override val name: Name = "float64".asName()
override val type: KType get() = typeOf<Double>()
override fun writeObject(output: Output, obj: kotlin.Double) {
output.writeDouble(obj)
override fun writeTo(sink: Sink, obj: Double) {
sink.writeLong(obj.toBits())
}
override fun readObject(input: Input): Double = input.readDouble()
}
//public object ValueIOFormat : IOFormat<Value>, IOFormatFactory<Value> {
// override fun invoke(meta: Meta, context: Context): IOFormat<Value> = this
//
// override val name: Name = "value".asName()
//
// override val type: KType get() = typeOf<Value>()
//
// override fun writeObject(output: Output, obj: Value) {
// BinaryMetaFormat.run { output.writeValue(obj) }
// }
//
// override fun readObject(input: Input): Value {
// return (BinaryMetaFormat.run { input.readMetaItem() } as? MetaItemValue)?.value
// ?: error("The item is not a value")
// }
//}
override fun readFrom(source: Source): Double = source.readDouble()
}
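As a sketch of the new IOReader/IOWriter split, here is a hypothetical length-prefixed string format implemented against kotlinx-io `Sink`/`Source` (StringIOFormat is not part of the library, just an illustration of the interfaces above):

```kotlin
import kotlinx.io.Sink
import kotlinx.io.Source
import kotlinx.io.readByteArray
import space.kscience.dataforge.io.*
import kotlin.reflect.KType
import kotlin.reflect.typeOf

// a hypothetical format that stores a String as length-prefixed UTF-8 bytes
object StringIOFormat : IOFormat<String> {
    override val type: KType = typeOf<String>()

    override fun writeTo(sink: Sink, obj: String) {
        val bytes = obj.encodeToByteArray()
        sink.writeInt(bytes.size)
        sink.write(bytes)
    }

    override fun readFrom(source: Source): String {
        val size = source.readInt()
        return source.readByteArray(size).decodeToString()
    }
}

// round-trip through an in-memory Binary
val binary = Binary("hello", StringIOFormat)
val restored: String = binary.readWith(StringIOFormat)
```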

View File

@ -2,17 +2,15 @@ package space.kscience.dataforge.io
import space.kscience.dataforge.context.*
import space.kscience.dataforge.io.EnvelopeFormatFactory.Companion.ENVELOPE_FORMAT_TYPE
import space.kscience.dataforge.io.IOFormat.Companion.META_KEY
import space.kscience.dataforge.io.IOFormat.Companion.NAME_KEY
import space.kscience.dataforge.io.IOFormatFactory.Companion.IO_FORMAT_TYPE
import space.kscience.dataforge.io.MetaFormatFactory.Companion.META_FORMAT_TYPE
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.get
import space.kscience.dataforge.meta.string
import space.kscience.dataforge.misc.DFInternal
import space.kscience.dataforge.names.Name
import kotlin.native.concurrent.ThreadLocal
import kotlin.reflect.KClass
import kotlin.reflect.KType
import kotlin.reflect.typeOf
public class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
override val tag: PluginTag get() = Companion.tag
@ -21,15 +19,14 @@ public class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
context.gather<IOFormatFactory<*>>(IO_FORMAT_TYPE).values
}
public fun <T : Any> resolveIOFormat(item: Meta, type: KClass<out T>): IOFormat<T>? {
val key = item.string ?: item[NAME_KEY]?.string ?: error("Format name not defined")
val name = Name.parse(key)
return ioFormatFactories.find { it.name == name }?.let {
@Suppress("UNCHECKED_CAST")
if (it.type != type) error("Format type ${it.type} is not the same as requested type $type")
else it.invoke(item[META_KEY] ?: Meta.EMPTY, context) as IOFormat<T>
}
}
@Suppress("UNCHECKED_CAST")
@DFInternal
public fun <T : Any> resolveIOFormat(type: KType, meta: Meta): IOFormat<T>? =
ioFormatFactories.singleOrNull { it.type == type }?.build(context, meta) as? IOFormat<T>
@OptIn(DFInternal::class)
public inline fun <reified T : Any> resolveIOFormat(meta: Meta = Meta.EMPTY): IOFormat<T>? =
resolveIOFormat(typeOf<T>(), meta)
public val metaFormatFactories: Collection<MetaFormatFactory> by lazy {
@ -37,48 +34,53 @@ public class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
}
public fun resolveMetaFormat(key: Short, meta: Meta = Meta.EMPTY): MetaFormat? =
metaFormatFactories.find { it.key == key }?.invoke(meta)
metaFormatFactories.find { it.key == key }?.build(context, meta)
public fun resolveMetaFormat(name: String, meta: Meta = Meta.EMPTY): MetaFormat? =
metaFormatFactories.find { it.shortName == name }?.invoke(meta)
metaFormatFactories.find { it.shortName == name }?.build(context, meta)
public val envelopeFormatFactories: Collection<EnvelopeFormatFactory> by lazy {
context.gather<EnvelopeFormatFactory>(ENVELOPE_FORMAT_TYPE).values
}
private fun resolveEnvelopeFormat(name: Name, meta: Meta = Meta.EMPTY): EnvelopeFormat? =
envelopeFormatFactories.find { it.name == name }?.invoke(meta, context)
envelopeFormatFactories.find { it.name == name }?.build(context, meta)
public fun resolveEnvelopeFormat(item: Meta): EnvelopeFormat? {
val name = item.string ?: item[NAME_KEY]?.string ?: error("Envelope format name not defined")
val meta = item[META_KEY] ?: Meta.EMPTY
val name = item.string ?: item[IOFormatFactory.NAME_KEY]?.string ?: error("Envelope format name not defined")
val meta = item[IOFormatFactory.META_KEY] ?: Meta.EMPTY
return resolveEnvelopeFormat(Name.parse(name), meta)
}
override fun content(target: String): Map<Name, Any> {
return when (target) {
META_FORMAT_TYPE -> defaultMetaFormats.toMap()
ENVELOPE_FORMAT_TYPE -> defaultEnvelopeFormats.toMap()
else -> super.content(target)
}
override fun content(target: String): Map<Name, Any> = when (target) {
META_FORMAT_TYPE -> defaultMetaFormats.associateByName()
ENVELOPE_FORMAT_TYPE -> defaultEnvelopeFormats.associateByName()
IO_FORMAT_TYPE -> content(META_FORMAT_TYPE) + content(ENVELOPE_FORMAT_TYPE)
else -> super.content(target)
}
public companion object : PluginFactory<IOPlugin> {
public val defaultMetaFormats: List<MetaFormatFactory> = listOf(JsonMetaFormat)
public val defaultEnvelopeFormats: List<EnvelopeFormatFactory> =
listOf(TaggedEnvelopeFormat, TaglessEnvelopeFormat)
public val defaultEnvelopeFormats: List<EnvelopeFormatFactory> = listOf(
TaggedEnvelopeFormat,
TaglessEnvelopeFormat
)
override val tag: PluginTag = PluginTag("io", group = PluginTag.DATAFORGE_GROUP)
override val type: KClass<out IOPlugin> = IOPlugin::class
override fun invoke(meta: Meta, context: Context): IOPlugin = IOPlugin(meta)
override fun build(context: Context, meta: Meta): IOPlugin = IOPlugin(meta)
public val WORK_DIRECTORY_KEY: Name = Name.of("io", "workDirectory")
}
}
@ThreadLocal
internal val ioContext = Global.withEnv {
name("IO")
internal val ioContext = Context("IO") {
plugin(IOPlugin)
}
public val Context.io: IOPlugin get() = (if (this == Global) ioContext else this).fetch(IOPlugin)
public val Context.io: IOPlugin
get() = if (this == Global) {
ioContext.request(IOPlugin)
} else {
request(IOPlugin)
}
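A minimal sketch of resolving formats through the plugin after the `invoke` → `build`/`request` migration (assumes only the default factories are registered):

```kotlin
import space.kscience.dataforge.context.Context
import space.kscience.dataforge.context.Global
import space.kscience.dataforge.io.*

// the Global context falls back to the internal "IO" context shown above
val io: IOPlugin = Global.io

// resolve a meta format by its short name; null means no factory is registered for it
val json: MetaFormat? = io.resolveMetaFormat("json")

// a dedicated context owns its own plugin instance
val context = Context("custom") {
    plugin(IOPlugin)
}
val customIo: IOPlugin = context.io
```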

View File

@ -1,39 +1,30 @@
@file:Suppress("UNUSED_PARAMETER")
package space.kscience.dataforge.io
import io.ktor.utils.io.core.Input
import io.ktor.utils.io.core.Output
import kotlinx.io.Sink
import kotlinx.io.Source
import kotlinx.io.readString
import kotlinx.io.writeString
import kotlinx.serialization.json.Json
import kotlinx.serialization.json.JsonObject
import kotlinx.serialization.json.JsonElement
import space.kscience.dataforge.context.Context
import space.kscience.dataforge.io.IOFormat.Companion.NAME_KEY
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.descriptors.MetaDescriptor
import space.kscience.dataforge.meta.toJson
import space.kscience.dataforge.meta.toMeta
import kotlin.reflect.KType
import kotlin.reflect.typeOf
/**
* A Json format for Meta representation
*/
public class JsonMetaFormat(private val json: Json = DEFAULT_JSON) : MetaFormat {
override val type: KType get() = typeOf<Meta>()
override fun writeMeta(output: Output, meta: Meta, descriptor: MetaDescriptor?) {
val jsonObject = meta.toJson(descriptor)
output.writeUtf8String(json.encodeToString(JsonObject.serializer(), jsonObject))
override fun writeMeta(sink: Sink, meta: Meta, descriptor: MetaDescriptor?) {
val jsonElement = meta.toJson(descriptor)
sink.writeString(json.encodeToString(JsonElement.serializer(), jsonElement))
}
override fun toMeta(): Meta = Meta {
NAME_KEY put name.toString()
}
override fun readMeta(input: Input, descriptor: MetaDescriptor?): Meta {
val str = input.readUtf8String()//readByteArray().decodeToString()
override fun readMeta(source: Source, descriptor: MetaDescriptor?): Meta {
val str = source.readString()
val jsonElement = json.parseToJsonElement(str)
return jsonElement.toMeta(descriptor)
}
@ -41,17 +32,17 @@ public class JsonMetaFormat(private val json: Json = DEFAULT_JSON) : MetaFormat
public companion object : MetaFormatFactory {
public val DEFAULT_JSON: Json = Json { prettyPrint = true }
override fun invoke(meta: Meta, context: Context): MetaFormat = default
override fun build(context: Context, meta: Meta): MetaFormat = default
override val shortName: String = "json"
override val key: Short = 0x4a53//"JS"
private val default = JsonMetaFormat()
override fun writeMeta(output: Output, meta: Meta, descriptor: MetaDescriptor?): Unit =
default.run { writeMeta(output, meta, descriptor) }
override fun writeMeta(sink: Sink, meta: Meta, descriptor: MetaDescriptor?): Unit =
default.run { writeMeta(sink, meta, descriptor) }
override fun readMeta(input: Input, descriptor: MetaDescriptor?): Meta =
default.run { readMeta(input, descriptor) }
override fun readMeta(source: Source, descriptor: MetaDescriptor?): Meta =
default.run { readMeta(source, descriptor) }
}
}
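A hedged sketch of a JSON round trip with the new `Sink`/`Source` based format (the meta keys are illustrative, not taken from the repository):

```kotlin
import space.kscience.dataforge.io.*
import space.kscience.dataforge.meta.*

// a small Meta tree with made-up keys
val meta = Meta {
    "task" put "demo"
    "limits" put {
        "min" put 0.0
        "max" put 22.2
    }
}

// JsonMetaFormat is a MetaFormatFactory, so it can be used directly as a format
val jsonText: String = meta.toString(JsonMetaFormat)
// parse the JSON text back into a Meta tree
val restored: Meta = JsonMetaFormat.parse(jsonText)
```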

View File

@ -1,14 +1,15 @@
package space.kscience.dataforge.io
import io.ktor.utils.io.core.ByteReadPacket
import io.ktor.utils.io.core.Input
import io.ktor.utils.io.core.Output
import io.ktor.utils.io.core.use
import kotlinx.io.Sink
import kotlinx.io.Source
import kotlinx.io.buffered
import space.kscience.dataforge.context.Context
import space.kscience.dataforge.context.Global
import space.kscience.dataforge.io.MetaFormatFactory.Companion.META_FORMAT_TYPE
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.descriptors.MetaDescriptor
import space.kscience.dataforge.misc.Type
import space.kscience.dataforge.misc.DfId
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.asName
import space.kscience.dataforge.names.plus
@ -19,24 +20,25 @@ import kotlin.reflect.typeOf
* A format for meta serialization
*/
public interface MetaFormat : IOFormat<Meta> {
override val type: KType get() = typeOf<Meta>()
override fun writeObject(output: Output, obj: Meta) {
writeMeta(output, obj, null)
override fun writeTo(sink: Sink, obj: Meta) {
writeMeta(sink, obj, null)
}
override fun readObject(input: Input): Meta = readMeta(input)
override fun readFrom(source: Source): Meta = readMeta(source)
public fun writeMeta(
output: Output,
sink: Sink,
meta: Meta,
descriptor: MetaDescriptor? = null,
)
public fun readMeta(input: Input, descriptor: MetaDescriptor? = null): Meta
public fun readMeta(source: Source, descriptor: MetaDescriptor? = null): Meta
}
@Type(META_FORMAT_TYPE)
@DfId(META_FORMAT_TYPE)
public interface MetaFormatFactory : IOFormatFactory<Meta>, MetaFormat {
public val shortName: String
@ -46,25 +48,23 @@ public interface MetaFormatFactory : IOFormatFactory<Meta>, MetaFormat {
public val key: Short get() = name.hashCode().toShort()
override operator fun invoke(meta: Meta, context: Context): MetaFormat
override fun build(context: Context, meta: Meta): MetaFormat
public companion object {
public const val META_FORMAT_TYPE: String = "io.format.meta"
}
}
public fun Meta.toString(format: MetaFormat): String = buildByteArray {
public fun Meta.toString(format: MetaFormat): String = ByteArray {
format.run {
writeObject(this@buildByteArray, this@toString)
writeTo(this@ByteArray, this@toString)
}
}.decodeToString()
public fun Meta.toString(formatFactory: MetaFormatFactory): String = toString(formatFactory())
public fun Meta.toString(formatFactory: MetaFormatFactory): String = toString(formatFactory.build(Global, Meta.EMPTY))
public fun MetaFormat.parse(str: String): Meta {
return ByteReadPacket(str.encodeToByteArray()).use { readObject(it) }
}
public fun MetaFormat.parse(str: String): Meta = readFrom(StringSource(str).buffered())
public fun MetaFormatFactory.parse(str: String, formatMeta: Meta): Meta = invoke(formatMeta).parse(str)
public fun MetaFormatFactory.parse(str: String, formatMeta: Meta): Meta = build(Global, formatMeta).parse(str)

View File

@ -1,9 +1,9 @@
package space.kscience.dataforge.io
import io.ktor.utils.io.core.*
import kotlinx.io.*
import kotlinx.io.bytestring.decodeToString
import space.kscience.dataforge.context.Context
import space.kscience.dataforge.io.IOFormat.Companion.META_KEY
import space.kscience.dataforge.io.IOFormat.Companion.NAME_KEY
import space.kscience.dataforge.context.Global
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.enum
import space.kscience.dataforge.meta.get
@ -19,79 +19,79 @@ import space.kscience.dataforge.names.plus
public class TaggedEnvelopeFormat(
public val io: IOPlugin,
public val version: VERSION = VERSION.DF02,
public val metaFormatFactory: MetaFormatFactory = JsonMetaFormat,
) : EnvelopeFormat {
// private val metaFormat = io.metaFormat(metaFormatKey)
// ?: error("Meta format with key $metaFormatKey could not be resolved in $io")
private fun Tag.toBinary() = Binary(24) {
writeRawString(START_SEQUENCE)
writeRawString(version.name)
private fun Tag.toBinary() = Binary {
write(START_SEQUENCE)
writeString(version.name)
writeShort(metaFormatKey)
writeUInt(metaSize)
when (version) {
VERSION.DF02 -> {
writeUInt(dataSize.toUInt())
}
VERSION.DF03 -> {
writeULong(dataSize)
}
}
writeRawString(END_SEQUENCE)
write(END_SEQUENCE)
}
override fun writeEnvelope(
output: Output,
envelope: Envelope,
metaFormatFactory: MetaFormatFactory,
formatMeta: Meta,
override fun writeTo(
sink: Sink,
obj: Envelope,
) {
val metaFormat = metaFormatFactory.invoke(formatMeta, this@TaggedEnvelopeFormat.io.context)
val metaBytes = metaFormat.toBinary(envelope.meta)
val actualSize: ULong = (envelope.data?.size ?: 0).toULong()
val metaFormat = metaFormatFactory.build(io.context, Meta.EMPTY)
val metaBytes = Binary(obj.meta, metaFormat)
val actualSize: ULong = (obj.data?.size ?: 0).toULong()
val tag = Tag(metaFormatFactory.key, metaBytes.size.toUInt() + 2u, actualSize)
output.writeBinary(tag.toBinary())
output.writeBinary(metaBytes)
output.writeRawString("\r\n")
envelope.data?.let {
output.writeBinary(it)
sink.writeBinary(tag.toBinary())
sink.writeBinary(metaBytes)
sink.writeString("\r\n")
obj.data?.let {
sink.writeBinary(it)
}
}
/**
* Read an envelope from input into memory
*
* @param input an input to read from
* @param source an input to read from
* @param formats a collection of meta formats to resolve
*/
override fun readObject(input: Input): Envelope {
val tag = input.readTag(this.version)
override fun readFrom(source: Source): Envelope {
val tag = source.readTag(this.version)
val metaFormat = io.resolveMetaFormat(tag.metaFormatKey)
?: error("Meta format with key ${tag.metaFormatKey} not found")
val metaBinary = input.readBinary(tag.metaSize.toInt())
val metaBinary = source.readBinary(tag.metaSize.toInt())
val meta: Meta = metaFormat.readObject(metaBinary)
val meta: Meta = metaFormat.readFrom(metaBinary)
val data = input.readBinary(tag.dataSize.toInt())
val data = source.readBinary(tag.dataSize.toInt())
return SimpleEnvelope(meta, data)
}
override fun readPartial(input: Input): PartialEnvelope {
val tag = input.readTag(this.version)
override fun readFrom(binary: Binary): Envelope = binary.read {
val tag = readTag(version)
val metaFormat = io.resolveMetaFormat(tag.metaFormatKey)
?: error("Meta format with key ${tag.metaFormatKey} not found")
val metaBinary = input.readBinary(tag.metaSize.toInt())
val metaBinary = readBinary(tag.metaSize.toInt())
val meta: Meta = metaFormat.readObject(metaBinary)
val meta: Meta = metaFormat.readFrom(metaBinary)
return PartialEnvelope(meta, version.tagSize + tag.metaSize, tag.dataSize)
SimpleEnvelope(meta, binary.view((version.tagSize + tag.metaSize).toInt(), tag.dataSize.toInt()))
}
private data class Tag(
@ -105,20 +105,13 @@ public class TaggedEnvelopeFormat(
DF03(24u)
}
override fun toMeta(): Meta = Meta {
NAME_KEY put name.toString()
META_KEY put {
"version" put version
}
}
public companion object : EnvelopeFormatFactory {
private const val START_SEQUENCE = "#~"
private const val END_SEQUENCE = "~#\r\n"
private val START_SEQUENCE = "#~".toAsciiByteString()
private val END_SEQUENCE = "~#\r\n".toAsciiByteString()
override val name: Name = super.name + "tagged"
override val name: Name = EnvelopeFormatFactory.ENVELOPE_FACTORY_NAME + "tagged"
override fun invoke(meta: Meta, context: Context): EnvelopeFormat {
override fun build(context: Context, meta: Meta): EnvelopeFormat {
val io = context.io
val metaFormatName = meta["name"].string?.let { Name.parse(it) } ?: JsonMetaFormat.name
@ -130,57 +123,48 @@ public class TaggedEnvelopeFormat(
return TaggedEnvelopeFormat(io, version)
}
private fun Input.readTag(version: VERSION): Tag {
val start = readRawString(2)
private fun Source.readTag(version: VERSION): Tag {
val start = readByteString(2)
if (start != START_SEQUENCE) error("The input is not an envelope")
val versionString = readRawString(4)
if (version.name != versionString) error("Wrong version of DataForge: expected $version but found $versionString")
val versionString = readByteString(4)
if (version.name.toAsciiByteString() != versionString) error("Wrong version of DataForge: expected $version but found $versionString")
val metaFormatKey = readShort()
val metaLength = readUInt()
val dataLength: ULong = when (version) {
VERSION.DF02 -> readUInt().toULong()
VERSION.DF03 -> readULong()
}
val end = readRawString(4)
val end = readByteString(4)
if (end != END_SEQUENCE) error("The input is not an envelope")
return Tag(metaFormatKey, metaLength, dataLength)
}
override fun peekFormat(io: IOPlugin, binary: Binary): EnvelopeFormat? {
return try {
binary.read{
val header = readRawString(6)
return@read when (header.substring(2..5)) {
VERSION.DF02.name -> TaggedEnvelopeFormat(io, VERSION.DF02)
VERSION.DF03.name -> TaggedEnvelopeFormat(io, VERSION.DF03)
else -> null
}
override fun peekFormat(io: IOPlugin, binary: Binary): EnvelopeFormat? = try {
binary.read {
val header = readByteString(6)
when (header.substring(2, 6).decodeToString()) {
VERSION.DF02.name -> TaggedEnvelopeFormat(io, VERSION.DF02)
VERSION.DF03.name -> TaggedEnvelopeFormat(io, VERSION.DF03)
else -> null
}
} catch (ex: Exception) {
null
}
} catch (ex: Exception) {
null
}
private val default by lazy { invoke(context = ioContext) }
private val default by lazy { build(Global, Meta.EMPTY) }
override fun readPartial(input: Input): PartialEnvelope =
default.run { readPartial(input) }
override fun readFrom(binary: Binary): Envelope =
default.run { readFrom(binary) }
override fun writeEnvelope(
output: Output,
envelope: Envelope,
metaFormatFactory: MetaFormatFactory,
formatMeta: Meta,
override fun writeTo(
sink: Sink,
obj: Envelope,
): Unit = default.run {
writeEnvelope(
output,
envelope,
metaFormatFactory,
formatMeta
)
writeTo(sink, obj)
}
override fun readObject(input: Input): Envelope = default.readObject(input)
override fun readFrom(source: Source): Envelope = default.readFrom(source)
}
}
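A minimal round-trip sketch with the tagged format after the migration (payload and meta are made up, mirroring EnvelopeFormatTest below):

```kotlin
import kotlinx.io.readByteArray
import kotlinx.io.writeString
import space.kscience.dataforge.io.*

// build an envelope with a small meta block and an inline payload
val envelope = Envelope {
    meta {
        "description" put "tagged example"
    }
    data {
        writeString("payload")
    }
}

// serialize with the tagged binary format and read it back from the binary
val binary = Binary(envelope, TaggedEnvelopeFormat)
val restored = binary.readWith(TaggedEnvelopeFormat)
val payload = restored.data?.read { readByteArray() }?.decodeToString()
```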

View File

@ -1,175 +1,104 @@
package space.kscience.dataforge.io
import io.ktor.utils.io.core.*
import kotlinx.io.*
import kotlinx.io.bytestring.ByteString
import kotlinx.io.bytestring.encodeToByteString
import space.kscience.dataforge.context.Context
import space.kscience.dataforge.io.IOFormat.Companion.META_KEY
import space.kscience.dataforge.io.IOFormat.Companion.NAME_KEY
import space.kscience.dataforge.context.Global
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.get
import space.kscience.dataforge.meta.isEmpty
import space.kscience.dataforge.meta.string
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.asName
import kotlin.collections.set
import space.kscience.dataforge.names.plus
/**
* A text envelope format with human-readable tag.
* A text envelope format based on block separators.
* TODO add description
*/
public class TaglessEnvelopeFormat(
public val io: IOPlugin,
public val meta: Meta = Meta.EMPTY,
public val metaFormatFactory: MetaFormatFactory = JsonMetaFormat,
) : EnvelopeFormat {
private val metaStart = meta[META_START_PROPERTY].string ?: DEFAULT_META_START
private val dataStart = meta[DATA_START_PROPERTY].string ?: DEFAULT_DATA_START
// private val metaStart = meta[META_START_PROPERTY].string ?: DEFAULT_META_START
// private val dataStart = meta[DATA_START_PROPERTY].string ?: DEFAULT_DATA_START
private fun Output.writeProperty(key: String, value: Any) {
writeFully("#? $key: $value;\r\n".encodeToByteArray())
}
// private fun Output.writeProperty(key: String, value: Any) {
// writeFully("#? $key: $value;\r\n".encodeToByteArray())
// }
override fun writeEnvelope(
output: Output,
envelope: Envelope,
metaFormatFactory: MetaFormatFactory,
formatMeta: Meta
override fun writeTo(
sink: Sink,
obj: Envelope,
) {
val metaFormat = metaFormatFactory(formatMeta, this.io.context)
val metaFormat = metaFormatFactory.build(this.io.context, meta)
//printing header
output.writeRawString(TAGLESS_ENVELOPE_HEADER + "\r\n")
//printing all properties
output.writeProperty(META_TYPE_PROPERTY,
metaFormatFactory.shortName)
//TODO add optional metaFormat properties
val actualSize: Int = envelope.data?.size ?: 0
output.writeProperty(DATA_LENGTH_PROPERTY, actualSize)
sink.write(TAGLESS_ENVELOPE_HEADER)
sink.writeString("\r\n")
//Printing meta
if (!envelope.meta.isEmpty()) {
val metaBytes = metaFormat.toBinary(envelope.meta)
output.writeProperty(META_LENGTH_PROPERTY,
metaBytes.size + 2)
output.writeUtf8String(this.metaStart + "\r\n")
output.writeBinary(metaBytes)
output.writeRawString("\r\n")
if (!obj.meta.isEmpty()) {
val metaBinary = Binary(obj.meta, metaFormat)
sink.writeString(META_START + "-${metaFormatFactory.shortName}\r\n")
sink.writeBinary(metaBinary)
sink.writeString("\r\n")
}
//Printing data
envelope.data?.let { data ->
output.writeUtf8String(this.dataStart + "\r\n")
output.writeBinary(data)
obj.data?.let { data ->
//val actualSize: Int = envelope.data?.size ?: 0
sink.writeString(DATA_START + "\r\n")
sink.writeBinary(data)
}
}
override fun readObject(input: Input): Envelope {
var line: String
do {
line = input.readSafeUtf8Line() // ?: error("Input does not contain tagless envelope header")
} while (!line.startsWith(TAGLESS_ENVELOPE_HEADER))
val properties = HashMap<String, String>()
line = ""
while (line.isBlank() || line.startsWith("#?")) {
if (line.startsWith("#?")) {
val match = propertyPattern.find(line)
?: error("Line $line does not match property declaration pattern")
val (key, value) = match.destructured
properties[key] = value
}
//If can't read line, return envelope without data
if (input.endOfInput) return SimpleEnvelope(Meta.EMPTY, null)
line = input.readSafeUtf8Line()
}
override fun readFrom(source: Source): Envelope {
//read preamble
source.discardWithSeparator(
TAGLESS_ENVELOPE_HEADER,
atMost = 1024,
)
var meta: Meta = Meta.EMPTY
if (line.startsWith(metaStart)) {
val metaFormat = properties[META_TYPE_PROPERTY]?.let { io.resolveMetaFormat(it) } ?: JsonMetaFormat
val metaSize = properties[META_LENGTH_PROPERTY]?.toInt()
meta = if (metaSize != null) {
metaFormat.readObject(input.readBinary(metaSize))
} else {
metaFormat.readObject(input)
var data: Binary? = null
source.discardWithSeparator(
SEPARATOR_PREFIX,
atMost = 1024,
)
var header: String = ByteArray {
source.readWithSeparatorTo(this, "\n".encodeToByteString())
}.decodeToString()
while (!source.exhausted()) {
val block = ByteArray {
source.readWithSeparatorTo(this, SEPARATOR_PREFIX)
}
val nextHeader = ByteArray {
source.readWithSeparatorTo(this, "\n".encodeToByteString())
}.decodeToString()
//terminate on end
if (header.startsWith("END")) break
if (header.startsWith("META")) {
//TODO check format
val metaFormat: MetaFormatFactory = JsonMetaFormat
meta = metaFormat.readMeta(ByteArraySource(block).buffered())
}
if (header.startsWith("DATA")) {
data = block.asBinary()
}
header = nextHeader
}
do {
try {
line = input.readSafeUtf8Line()
} catch (ex: EOFException) {
//returning an Envelope without data if end of input is reached
return SimpleEnvelope(meta, null)
}
} while (!line.startsWith(dataStart))
val data: Binary = if (properties.containsKey(DATA_LENGTH_PROPERTY)) {
input.readBinary(properties[DATA_LENGTH_PROPERTY]!!.toInt())
// val bytes = ByteArray(properties[DATA_LENGTH_PROPERTY]!!.toInt())
// readByteArray(bytes)
// bytes.asBinary()
} else {
Binary {
input.copyTo(this)
}
}
return SimpleEnvelope(meta, data)
}
override fun readPartial(input: Input): PartialEnvelope {
var offset = 0u
var line: String
do {
line = input.readSafeUtf8Line()// ?: error("Input does not contain tagless envelope header")
offset += line.encodeToByteArray().size.toUInt()
} while (!line.startsWith(TAGLESS_ENVELOPE_HEADER))
val properties = HashMap<String, String>()
line = ""
while (line.isBlank() || line.startsWith("#?")) {
if (line.startsWith("#?")) {
val match = propertyPattern.find(line)
?: error("Line $line does not match property declaration pattern")
val (key, value) = match.destructured
properties[key] = value
}
try {
line = input.readSafeUtf8Line()
offset += line.encodeToByteArray().size.toUInt()
} catch (ex: EOFException) {
return PartialEnvelope(Meta.EMPTY, offset.toUInt(), 0.toULong())
}
}
var meta: Meta = Meta.EMPTY
if (line.startsWith(metaStart)) {
val metaFormat = properties[META_TYPE_PROPERTY]?.let { io.resolveMetaFormat(it) } ?: JsonMetaFormat
val metaSize = properties[META_LENGTH_PROPERTY]?.toInt()
meta = if (metaSize != null) {
offset += metaSize.toUInt()
metaFormat.readObject(input.readBinary(metaSize))
} else {
error("Can't partially read an envelope with undefined meta size")
}
}
do {
line = input.readSafeUtf8Line() //?: return PartialEnvelope(Meta.EMPTY, offset.toUInt(), 0.toULong())
offset += line.encodeToByteArray().size.toUInt()
//returning an Envelope without data if end of input is reached
} while (!line.startsWith(dataStart))
val dataSize = properties[DATA_LENGTH_PROPERTY]?.toULong()
return PartialEnvelope(meta, offset, dataSize)
}
override fun toMeta(): Meta = Meta {
NAME_KEY put name.toString()
META_KEY put meta
return Envelope(meta, data)
}
public companion object : EnvelopeFormatFactory {
@ -183,45 +112,41 @@ public class TaglessEnvelopeFormat(
public const val TAGLESS_ENVELOPE_TYPE: String = "tagless"
public const val TAGLESS_ENVELOPE_HEADER: String = "#~DFTL~#"
public const val META_START_PROPERTY: String = "metaSeparator"
public const val DEFAULT_META_START: String = "#~META~#"
public const val DATA_START_PROPERTY: String = "dataSeparator"
public const val DEFAULT_DATA_START: String = "#~DATA~#"
public val SEPARATOR_PREFIX: ByteString = "\n#~".encodeToByteString()
public val TAGLESS_ENVELOPE_HEADER: ByteString = "#~DFTL".encodeToByteString()
// public const val META_START_PROPERTY: String = "metaSeparator"
public const val META_START: String = "#~META"
// public const val DATA_START_PROPERTY: String = "dataSeparator"
public const val DATA_START: String = "#~DATA"
public const val END: String = "#~END"
public const val code: Int = 0x4446544c //DFTL
override val name: Name = TAGLESS_ENVELOPE_TYPE.asName()
override val name: Name = EnvelopeFormatFactory.ENVELOPE_FACTORY_NAME + TAGLESS_ENVELOPE_TYPE
override fun invoke(meta: Meta, context: Context): EnvelopeFormat {
return TaglessEnvelopeFormat(context.io, meta)
}
override fun build(context: Context, meta: Meta): EnvelopeFormat = TaglessEnvelopeFormat(context.io, meta)
private val default by lazy { invoke(context = ioContext) }
private val default by lazy { build(Global, Meta.EMPTY) }
override fun readPartial(input: Input): PartialEnvelope =
default.run { readPartial(input) }
override fun readFrom(binary: Binary): Envelope = default.run { readFrom(binary) }
override fun writeEnvelope(
output: Output,
envelope: Envelope,
metaFormatFactory: MetaFormatFactory,
formatMeta: Meta,
override fun writeTo(
sink: Sink,
obj: Envelope,
): Unit = default.run {
writeEnvelope(
output,
envelope,
metaFormatFactory,
formatMeta
)
writeTo(sink, obj)
}
override fun readObject(input: Input): Envelope = default.readObject(input)
override fun readFrom(source: Source): Envelope = default.readFrom(source)
override fun peekFormat(io: IOPlugin, binary: Binary): EnvelopeFormat? {
return try {
binary.read {
val string = readRawString(TAGLESS_ENVELOPE_HEADER.length)
val string = readByteString(TAGLESS_ENVELOPE_HEADER.size)
return@read if (string == TAGLESS_ENVELOPE_HEADER) {
TaglessEnvelopeFormat(io)
} else {

View File

@ -1,51 +1,235 @@
package space.kscience.dataforge.io
import io.ktor.utils.io.charsets.Charsets
import io.ktor.utils.io.charsets.decodeExactBytes
import io.ktor.utils.io.core.*
import kotlinx.io.*
import kotlinx.io.bytestring.ByteString
import kotlinx.io.bytestring.decodeToString
import kotlinx.io.bytestring.encodeToByteString
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.misc.DFExperimental
import kotlin.math.min
public fun Output.writeRawString(str: String) {
writeFully(str.toByteArray(Charsets.ISO_8859_1))
/**
* Convert a string literal containing only ASCII characters to a [ByteString].
* Throws an error if there are non-ASCII characters.
*/
public fun String.toAsciiByteString(): ByteString {
val bytes = ByteArray(length) {
val char = get(it)
val code = char.code
if (code > Byte.MAX_VALUE) error("Symbol $char is not ASCII symbol") else code.toByte()
}
return ByteString(bytes)
}
public fun Output.writeUtf8String(str: String) {
writeFully(str.encodeToByteArray())
}
public inline fun Buffer(block: Sink.() -> Unit): Buffer = Buffer().apply(block)
@OptIn(ExperimentalIoApi::class)
public fun Input.readRawString(size: Int): String {
return Charsets.ISO_8859_1.newDecoder().decodeExactBytes(this, size)
}
//public fun Source.readSafeUtf8Line(): String = readUTF8Line() ?: error("Line not found")
public fun Input.readUtf8String(): String = readBytes().decodeToString()
public inline fun ByteArray(block: Sink.() -> Unit): ByteArray =
Buffer(block).readByteArray()
public fun Input.readSafeUtf8Line(): String = readUTF8Line() ?: error("Line not found")
public inline fun Binary(block: Sink.() -> Unit): Binary =
ByteArray(block).asBinary()
public inline fun buildByteArray(expectedSize: Int = 16, block: Output.() -> Unit): ByteArray {
val builder = BytePacketBuilder(expectedSize)
builder.block()
return builder.build().readBytes()
}
public inline fun Binary(expectedSize: Int = 16, block: Output.() -> Unit): Binary =
buildByteArray(expectedSize, block).asBinary()
public operator fun Binary.get(range: IntRange): Binary = view(range.first, range.last - range.first)
/**
* View section of a [Binary] as an independent binary
* Return the inferred [EnvelopeFormat] if exactly one registered format can read the given binary. If no format accepts
* the binary, return null. If multiple formats accept it, throw an error.
*/
public class BinaryView(private val source: Binary, private val start: Int, override val size: Int) : Binary {
init {
require(start > 0)
require(start + size <= source.size) { "View boundary is outside source binary size" }
public fun IOPlugin.peekBinaryEnvelopeFormat(binary: Binary): EnvelopeFormat? {
val formats = envelopeFormatFactories.mapNotNull { factory ->
factory.peekFormat(this@peekBinaryEnvelopeFormat, binary)
}
override fun <R> read(offset: Int, atMost: Int, block: Input.() -> R): R {
return source.read(start + offset, min(size, atMost), block)
return when (formats.size) {
0 -> null
1 -> formats.first()
else -> error("Envelope format binary recognition clash: $formats")
}
}
public fun Binary.view(start: Int, size: Int): BinaryView = BinaryView(this, start, size)
/**
* A zero-copy read of an envelope from the given [Binary]
*/
@DFExperimental
public fun IOPlugin.readEnvelope(
binary: Binary,
readNonEnvelopes: Boolean = false,
formatPicker: IOPlugin.(Binary) -> EnvelopeFormat? = IOPlugin::peekBinaryEnvelopeFormat,
): Envelope = formatPicker(binary)?.readFrom(binary) ?: if (readNonEnvelopes) {
// if no format accepts file, read it as binary
Envelope(Meta.EMPTY, binary)
} else error("Can't infer format for $binary")
public operator fun Binary.get(range: IntRange): BinaryView = view(range.first, range.last - range.first)
@DFExperimental
public fun IOPlugin.readEnvelope(
string: String,
readNonEnvelopes: Boolean = false,
formatPicker: IOPlugin.(Binary) -> EnvelopeFormat? = IOPlugin::peekBinaryEnvelopeFormat,
): Envelope = readEnvelope(string.encodeToByteArray().asBinary(), readNonEnvelopes, formatPicker)
private class RingByteArray(
private val buffer: ByteArray,
private var startIndex: Int = 0,
var size: Int = 0,
) {
operator fun get(index: Int): Byte {
require(index >= 0) { "Index must be positive" }
require(index < size) { "Index $index is out of circular buffer size $size" }
return buffer[startIndex.forward(index)]
}
fun isFull(): Boolean = size == buffer.size
fun push(element: Byte) {
buffer[startIndex.forward(size)] = element
if (isFull()) startIndex++ else size++
}
private fun Int.forward(n: Int): Int = (this + n) % (buffer.size)
fun contentEquals(inputArray: ByteArray): Boolean = when {
inputArray.size != buffer.size -> false
size < buffer.size -> false
else -> inputArray.indices.all { inputArray[it] == get(it) }
}
fun contentEquals(byteString: ByteString): Boolean = when {
byteString.size != buffer.size -> false
size < buffer.size -> false
else -> (0 until byteString.size).all { byteString[it] == get(it) }
}
}
private fun RingByteArray.toArray(): ByteArray = ByteArray(size) { get(it) }
/**
* Read [Source] into [output] until the multibyte [separator] is encountered (or until the end of input).
* An error is thrown if more than [atMost] bytes are read before the separator is found.
*
* The separator itself is consumed but not written into the [Sink].
*
* @param errorOnEof if true, an error is thrown if the separator is never encountered
*
* @return the number of bytes actually read, including the separator when it is found
*/
public fun Source.readWithSeparatorTo(
output: Sink?,
separator: ByteString,
atMost: Int = Int.MAX_VALUE,
errorOnEof: Boolean = false,
): Int {
var counter = 0
val rb = RingByteArray(ByteArray(separator.size))
while (!exhausted()) {
val byte = readByte()
counter++
if (counter >= atMost) error("Maximum number of bytes to be read $atMost reached.")
rb.push(byte)
if (rb.contentEquals(separator)) {
return counter
} else if (rb.isFull()) {
output?.writeByte(rb[0])
}
}
if (errorOnEof) {
error("Read to the end of input without encountering ${separator.decodeToString()}")
} else {
for (i in 1 until rb.size) {
output?.writeByte(rb[i])
}
counter += (rb.size - 1)
return counter
}
}
/**
* Discard all bytes until [separator] is encountered. The separator is discarded as well.
* Return the total number of bytes read.
*/
public fun Source.discardWithSeparator(
separator: ByteString,
atMost: Int = Int.MAX_VALUE,
errorOnEof: Boolean = false,
): Int = readWithSeparatorTo(null, separator, atMost, errorOnEof)
/**
* Discard all symbols until a newline is discovered. The carriage return is not discarded.
*/
public fun Source.discardLine(
atMost: Int = Int.MAX_VALUE,
errorOnEof: Boolean = false,
): Int = discardWithSeparator("\n".encodeToByteString(), atMost, errorOnEof)
/**
* A [Source] based on [ByteArray]
*/
internal class ByteArraySource(
private val byteArray: ByteArray,
private val offset: Int = 0,
private val size: Int = byteArray.size - offset,
) : RawSource {
init {
require(offset >= 0) { "Offset must be positive" }
require(offset + size <= byteArray.size) { "End index is ${offset + size}, but the array size is ${byteArray.size}" }
}
private var pointer = offset
override fun close() {
// Do nothing
}
override fun readAtMostTo(sink: Buffer, byteCount: Long): Long {
if (pointer == offset + size) return -1
val byteRead = min(byteCount.toInt(), (size + offset - pointer))
sink.write(byteArray, pointer, pointer + byteRead)
pointer += byteRead
return byteRead.toLong()
}
}
/**
* A [Source] based on [String]
*/
public class StringSource(
public val string: String,
public val offset: Int = 0,
public val size: Int = string.length - offset,
) : RawSource {
private var pointer = offset
override fun close() {
// Do nothing
}
override fun readAtMostTo(sink: Buffer, byteCount: Long): Long {
if (pointer == offset + size) return -1
val byteRead = min(byteCount.toInt(), (size + offset - pointer))
sink.writeString(string, pointer, pointer + byteRead)
pointer += byteRead
return byteRead.toLong()
}
}
public fun Sink.writeDouble(value: Double) {
writeLong(value.toBits())
}
public fun Source.readDouble(): Double = Double.fromBits(readLong())
public fun Sink.writeFloat(value: Float) {
writeInt(value.toBits())
}
public fun Source.readFloat(): Float = Float.fromBits(readInt())
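A small sketch tying the helpers above together (values are arbitrary):

```kotlin
import space.kscience.dataforge.io.*

// pack two numbers using the Sink-based Binary builder defined above
val packed: Binary = Binary {
    writeDouble(3.14)   // helper defined in this file: writeLong(value.toBits())
    writeFloat(2.5f)
}

// read them back with the matching Source extensions
val (d, f) = packed.read {
    readDouble() to readFloat()
}
```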

View File

@ -1,6 +1,5 @@
package space.kscience.dataforge.io
import io.ktor.utils.io.core.readInt
import kotlin.test.Test
import kotlin.test.assertEquals

View File

@ -1,7 +1,7 @@
package space.kscience.dataforge.io
import io.ktor.utils.io.core.readDouble
import io.ktor.utils.io.core.writeDouble
import kotlinx.io.readByteArray
import kotlinx.io.writeString
import kotlin.test.Test
import kotlin.test.assertEquals
@ -9,42 +9,56 @@ import kotlin.test.assertEquals
class EnvelopeFormatTest {
val envelope = Envelope {
type = "test.format"
meta{
meta {
"d" put 22.2
}
data{
writeDouble(22.2)
// repeat(2000){
// writeInt(it)
// }
data {
writeString("12345678")
}
}
@Test
fun testTaggedFormat(){
TaggedEnvelopeFormat.run {
val byteArray = writeToByteArray(envelope)
//println(byteArray.decodeToString())
val res = readFromByteArray(byteArray)
assertEquals(envelope.meta,res.meta)
val double = res.data?.read {
readDouble()
}
assertEquals(22.2, double)
fun testTaggedFormat() = with(TaggedEnvelopeFormat) {
val byteArray = writeToByteArray(envelope)
val res = readFromByteArray(byteArray)
assertEquals(envelope.meta, res.meta)
val bytes = res.data?.read {
readByteArray()
}
assertEquals("12345678", bytes?.decodeToString())
}
@Test
fun testTaglessFormat(){
TaglessEnvelopeFormat.run {
val byteArray = writeToByteArray(envelope)
//println(byteArray.decodeToString())
val res = readFromByteArray(byteArray)
assertEquals(envelope.meta,res.meta)
val double = res.data?.read {
readDouble()
}
assertEquals(22.2, double)
fun testTaglessFormat() = with(TaglessEnvelopeFormat) {
val byteArray = writeToByteArray(envelope)
println(byteArray.decodeToString())
val res = readFromByteArray(byteArray)
assertEquals(envelope.meta, res.meta)
val bytes = res.data?.read {
readByteArray()
}
assertEquals("12345678", bytes?.decodeToString())
}
@Test
fun testManualDftl() {
val envelopeString = """
#~DFTL
#~META
{
"@envelope": {
"type": "test.format"
},
"d": 22.2
}
#~DATA
12345678
""".trimIndent()
val res = TaglessEnvelopeFormat.readFromByteArray(envelopeString.encodeToByteArray())
assertEquals(envelope.meta, res.meta)
val bytes = res.data?.read {
readByteArray()
}
assertEquals("12345678", bytes?.decodeToString())
}
}

View File

@ -1,17 +1,58 @@
package space.kscience.dataforge.io
import io.ktor.utils.io.core.ByteReadPacket
import io.ktor.utils.io.core.readBytes
import kotlinx.io.buffered
import kotlinx.io.bytestring.encodeToByteString
import kotlinx.io.readByteArray
import kotlinx.io.readLine
import kotlin.test.Test
import kotlin.test.assertEquals
import kotlin.test.assertFails
class IOTest {
@Test
fun readBytes() {
val bytes = ByteArray(8) { it.toByte() }
val input = ByteReadPacket(bytes)
@Suppress("UNUSED_VARIABLE") val first = input.readBytes(4)
val second = input.readBytes(4)
val input = ByteArraySource(bytes).buffered()
@Suppress("UNUSED_VARIABLE") val first = input.readByteArray(4)
val second = input.readByteArray(4)
assertEquals(4.toByte(), second[0])
}
@Test
fun readUntilSeparator() {
val source = """
aaa
bbb
---
ccc
ddd
""".trimIndent()
val binary = source.encodeToByteArray().asBinary()
binary.read {
val array = ByteArray {
val read = readWithSeparatorTo(this, "---".encodeToByteString()) + discardLine()
assertEquals(12, read)
}
assertEquals("""
aaa
bbb
""".trimIndent(),array.decodeToString().trim())
assertEquals("ccc", readLine()?.trim())
}
assertFails {
binary.read {
discardWithSeparator("---".encodeToByteString(), atMost = 3 )
}
}
assertFails {
binary.read{
discardWithSeparator("-+-".encodeToByteString(), errorOnEof = true)
}
}
}
}

View File

@ -2,18 +2,16 @@ package space.kscience.dataforge.io
import kotlinx.serialization.json.*
import space.kscience.dataforge.meta.*
import space.kscience.dataforge.values.ListValue
import space.kscience.dataforge.values.double
import kotlin.test.Test
import kotlin.test.assertEquals
fun Meta.toByteArray(format: MetaFormat = JsonMetaFormat) = buildByteArray {
format.writeObject(this@buildByteArray, this@toByteArray)
fun Meta.toByteArray(format: MetaFormat = JsonMetaFormat) = ByteArray {
format.writeTo(this@ByteArray, this@toByteArray)
}
fun MetaFormat.fromByteArray(packet: ByteArray): Meta {
return packet.asBinary().read { readObject(this) }
return packet.asBinary().read { readFrom(this) }
}
class MetaFormatTest {

View File

@ -1,5 +1,6 @@
package space.kscience.dataforge.io
import kotlinx.io.writeString
import space.kscience.dataforge.context.Global
import space.kscience.dataforge.meta.get
import space.kscience.dataforge.meta.int
@ -18,32 +19,31 @@ class MultipartTest {
"value" put it
}
data {
writeUtf8String("Hello World $it")
writeString("Hello World $it")
repeat(300) {
writeRawString("$it ")
writeString("$it ")
}
}
}
}
val partsEnvelope = Envelope {
envelopes(envelopes, TaglessEnvelopeFormat)
envelopes(envelopes)
}
@Test
fun testParts() {
TaglessEnvelopeFormat.run {
val singleEnvelopeData = toBinary(envelopes[0])
val singleEnvelopeSize = singleEnvelopeData.size
val bytes = toBinary(partsEnvelope)
assertTrue(envelopes.size * singleEnvelopeSize < bytes.size)
val reconstructed = bytes.readWith(this)
println(reconstructed.meta)
val parts = reconstructed.parts()
val envelope = parts[2].envelope(io)
assertEquals(2, envelope.meta["value"].int)
println(reconstructed.data!!.size)
}
val format = TaggedEnvelopeFormat
val singleEnvelopeData = Binary(envelopes[0], format)
val singleEnvelopeSize = singleEnvelopeData.size
val bytes = Binary(partsEnvelope, format)
assertTrue(envelopes.size * singleEnvelopeSize < bytes.size)
val reconstructed = bytes.readWith(format)
println(reconstructed.meta)
val parts = reconstructed.parts()
val envelope = parts[2].envelope()
assertEquals(2, envelope.meta["value"].int)
println(reconstructed.data!!.size)
}
}

View File

@ -1,12 +1,11 @@
package space.kscience.dataforge.io
import io.ktor.utils.io.core.ByteReadPacket
import io.ktor.utils.io.core.use
import kotlinx.io.buffered
fun <T : Any> IOFormat<T>.writeToByteArray(obj: T): ByteArray = buildByteArray {
writeObject(this, obj)
fun <T : Any> IOFormat<T>.writeToByteArray(obj: T): ByteArray = ByteArray {
writeTo(this, obj)
}
fun <T : Any> IOFormat<T>.readFromByteArray(array: ByteArray): T = ByteReadPacket(array).use {
readObject(it)
fun <T : Any> IOFormat<T>.readFromByteArray(array: ByteArray): T = ByteArraySource(array).buffered().use {
readFrom(it)
}

View File

@ -1,7 +1,11 @@
package space.kscience.dataforge.io
import io.ktor.utils.io.core.*
import io.ktor.utils.io.streams.asOutput
import kotlinx.coroutines.runBlocking
import kotlinx.io.Sink
import kotlinx.io.Source
import kotlinx.io.asSink
import kotlinx.io.buffered
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.descriptors.MetaDescriptor
import space.kscience.dataforge.meta.isEmpty
@ -9,7 +13,6 @@ import space.kscience.dataforge.misc.DFExperimental
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.StandardOpenOption
import kotlin.io.path.ExperimentalPathApi
import kotlin.io.path.inputStream
import kotlin.math.min
import kotlin.reflect.full.isSupertypeOf
@ -23,76 +26,104 @@ internal class PathBinary(
override val size: Int = Files.size(path).toInt() - fileOffset,
) : Binary {
@OptIn(ExperimentalPathApi::class)
override fun <R> read(offset: Int, atMost: Int, block: Input.() -> R): R {
override fun <R> read(offset: Int, atMost: Int, block: Source.() -> R): R = runBlocking {
readSuspend(offset, atMost, block)
}
override suspend fun <R> readSuspend(offset: Int, atMost: Int, block: suspend Source.() -> R): R {
val actualOffset = offset + fileOffset
val actualSize = min(atMost, size - offset)
val array = path.inputStream().use {
it.skip(actualOffset.toLong())
it.readNBytes(actualSize)
}
return ByteReadPacket(array).block()
return ByteArraySource(array).buffered().use { it.block() }
}
override fun view(offset: Int, binarySize: Int) = PathBinary(path, fileOffset + offset, binarySize)
}
public fun Path.asBinary(): Binary = PathBinary(this)
public fun <R> Path.read(block: Input.() -> R): R = asBinary().read(block = block)
public fun <R> Path.read(block: Source.() -> R): R = asBinary().read(block = block)
/**
* Write a live output to a newly created file. Throws an error if the file already exists.
*/
public fun Path.write(block: Output.() -> Unit): Unit {
public fun Path.write(block: Sink.() -> Unit): Unit {
val stream = Files.newOutputStream(this, StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW)
stream.asOutput().use(block)
stream.asSink().buffered().use(block)
}
/**
* Create a new file or append to an existing one using the given output [block]
*/
public fun Path.append(block: Output.() -> Unit): Unit {
public fun Path.append(block: Sink.() -> Unit): Unit {
val stream = Files.newOutputStream(
this,
StandardOpenOption.WRITE, StandardOpenOption.APPEND, StandardOpenOption.CREATE
)
stream.asOutput().use(block)
stream.asSink().buffered().use(block)
}
/**
* Create a new file or replace an existing one using the given output [block]
*/
public fun Path.rewrite(block: Output.() -> Unit): Unit {
public fun Path.rewrite(block: Sink.() -> Unit): Unit {
val stream = Files.newOutputStream(
this,
StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE
)
stream.asOutput().use(block)
stream.asSink().buffered().use(block)
}
public fun Path.readEnvelope(format: EnvelopeFormat): Envelope {
val partialEnvelope: PartialEnvelope = asBinary().read {
format.run {
readPartial(this@read)
}
}
val offset: Int = partialEnvelope.dataOffset.toInt()
val size: Int = partialEnvelope.dataSize?.toInt() ?: (Files.size(this).toInt() - offset)
val binary = PathBinary(this, offset, size)
return SimpleEnvelope(partialEnvelope.meta, binary)
}
@DFExperimental
public fun EnvelopeFormat.readFile(path: Path): Envelope = readFrom(path.asBinary())
/**
* Resolve IOFormat based on type
*/
@Suppress("UNCHECKED_CAST")
@DFExperimental
public inline fun <reified T : Any> IOPlugin.resolveIOFormat(): IOFormat<T>? {
return ioFormatFactories.find { it.type.isSupertypeOf(typeOf<T>()) } as IOFormat<T>?
public inline fun <reified T : Any> IOPlugin.resolveIOFormat(): IOFormat<T>? =
ioFormatFactories.find { it.type.isSupertypeOf(typeOf<T>()) } as IOFormat<T>?
public val IOPlugin.Companion.META_FILE_NAME: String get() = "@meta"
public val IOPlugin.Companion.DATA_FILE_NAME: String get() = "@data"
/**
* Read a file containing meta, using the given [formatOverride] or the file extension to infer the meta format.
* If [path] is a directory, search it for a file whose name starts with `@meta`.
*
* Returns null if the meta could not be resolved.
*/
public fun IOPlugin.readMetaFileOrNull(
path: Path,
formatOverride: MetaFormat? = null,
descriptor: MetaDescriptor? = null,
): Meta? {
if (!Files.exists(path)) return null
val actualPath: Path = if (Files.isDirectory(path)) {
Files.list(path).asSequence().find { it.fileName.startsWith(IOPlugin.META_FILE_NAME) }
?: return null
} else {
path
}
val extension = actualPath.fileName.toString().substringAfterLast('.')
val metaFormat = formatOverride ?: resolveMetaFormat(extension) ?: return null
return actualPath.read {
metaFormat.readMeta(this, descriptor)
}
}
/**
* Read file containing meta using given [formatOverride] or file extension to infer meta type.
* If [path] is a directory search for file starting with `meta` in it
* If [path] is a directory search for file starting with `meta` in it.
*
* Fails if nothing works.
*/
public fun IOPlugin.readMetaFile(
path: Path,
@ -102,7 +133,7 @@ public fun IOPlugin.readMetaFile(
if (!Files.exists(path)) error("Meta file $path does not exist")
val actualPath: Path = if (Files.isDirectory(path)) {
Files.list(path).asSequence().find { it.fileName.startsWith("meta") }
Files.list(path).asSequence().find { it.fileName.startsWith(IOPlugin.META_FILE_NAME) }
?: error("The directory $path does not contain meta file")
} else {
path
@ -110,13 +141,12 @@ public fun IOPlugin.readMetaFile(
val extension = actualPath.fileName.toString().substringAfterLast('.')
val metaFormat = formatOverride ?: resolveMetaFormat(extension) ?: error("Can't resolve meta format $extension")
return metaFormat.run {
actualPath.read {
readMeta(this, descriptor)
}
return actualPath.read {
metaFormat.readMeta(this, descriptor)
}
}
/**
* Write meta to a file using [metaFormat]. If [path] is a directory, write a file whose name equals the name of [metaFormat],
* e.g. "meta.json".
@ -145,19 +175,9 @@ public fun IOPlugin.writeMetaFile(
*/
public fun IOPlugin.peekFileEnvelopeFormat(path: Path): EnvelopeFormat? {
val binary = path.asBinary()
val formats = envelopeFormatFactories.mapNotNull { factory ->
factory.peekFormat(this@peekFileEnvelopeFormat, binary)
}
return when (formats.size) {
0 -> null
1 -> formats.first()
else -> error("Envelope format binary recognition clash: $formats")
}
return peekBinaryEnvelopeFormat(binary)
}
public val IOPlugin.Companion.META_FILE_NAME: String get() = "@meta"
public val IOPlugin.Companion.DATA_FILE_NAME: String get() = "@data"
/**
* Read an envelope from a file if the file exists; return null if the file does not exist.
@ -203,22 +223,11 @@ public fun IOPlugin.readEnvelopeFile(
return SimpleEnvelope(meta, data)
}
return formatPicker(path)?.let { format ->
path.readEnvelope(format)
} ?: if (readNonEnvelopes) { // if no format accepts file, read it as binary
return formatPicker(path)?.readFile(path) ?: if (readNonEnvelopes) { // if no format accepts file, read it as binary
SimpleEnvelope(Meta.EMPTY, path.asBinary())
} else error("Can't infer format for file $path")
}
/**
* Write a binary into file. Throws an error if file already exists
*/
public fun <T : Any> IOFormat<T>.writeToFile(path: Path, obj: T) {
path.write {
writeObject(this, obj)
}
}
/**
* Write envelope file to given [path] using [envelopeFormat] and optional [metaFormat]
*/
@ -227,10 +236,9 @@ public fun IOPlugin.writeEnvelopeFile(
path: Path,
envelope: Envelope,
envelopeFormat: EnvelopeFormat = TaggedEnvelopeFormat,
metaFormat: MetaFormatFactory? = null,
) {
path.rewrite {
envelopeFormat.writeEnvelope(this, envelope, metaFormat ?: envelopeFormat.defaultMetaFormat)
envelopeFormat.writeTo(this, envelope)
}
}
@ -255,7 +263,7 @@ public fun IOPlugin.writeEnvelopeDirectory(
val dataFile = path.resolve(IOPlugin.DATA_FILE_NAME)
dataFile.write {
envelope.data?.read {
val copied = copyTo(this@write)
val copied = transferTo(this@write)
if (copied != envelope.data?.size?.toLong()) {
error("The number of copied bytes does not equal data size")
}
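A hedged sketch of a file round trip with the updated Path/Sink API (the temporary path and payload are illustrative):

```kotlin
import kotlinx.io.writeString
import space.kscience.dataforge.context.Global
import space.kscience.dataforge.io.*
import java.nio.file.Files

// a throw-away file for the round trip (hypothetical location)
val path = Files.createTempDirectory("df-io").resolve("envelope.df")

val envelope = Envelope {
    meta { "purpose" put "file round trip" }
    data { writeString("file payload") }
}

val io = Global.io
// writes with TaggedEnvelopeFormat by default, replacing the file if it exists
io.writeEnvelopeFile(path, envelope)
// the format is inferred by peeking at the file header
val restored = io.readEnvelopeFile(path)
```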

View File

@ -0,0 +1,11 @@
package space.kscience.dataforge.io
import kotlinx.io.Source
import kotlinx.io.asSource
import kotlinx.io.buffered
public fun IOPlugin.resource(name: String): Binary? = { }.javaClass.getResource(name)?.readBytes()?.asBinary()
public inline fun <R> IOPlugin.readResource(name: String, block: Source.() -> R): R =
{ }.javaClass.getResource(name)?.openStream()?.asSource()?.buffered()?.block() ?: error("Can't read resource $name")
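A minimal usage sketch; the resource path is hypothetical:

```kotlin
import kotlinx.io.readString
import space.kscience.dataforge.context.Global
import space.kscience.dataforge.io.*

// "/test-data/example.txt" is a made-up classpath resource;
// readResource throws an error if the resource cannot be found
val text: String = Global.io.readResource("/test-data/example.txt") {
    readString()
}
```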

View File

@ -0,0 +1,27 @@
package space.kscience.dataforge.io
import space.kscience.dataforge.context.ContextBuilder
import space.kscience.dataforge.meta.set
import space.kscience.dataforge.meta.string
import java.nio.file.Path
import kotlin.io.path.Path
public val IOPlugin.workDirectory: Path
get() {
val workDirectoryPath = meta[IOPlugin.WORK_DIRECTORY_KEY].string
?: context.properties[IOPlugin.WORK_DIRECTORY_KEY].string
?: ".dataforge"
return Path(workDirectoryPath)
}
public fun ContextBuilder.workDirectory(path: String) {
properties {
set(IOPlugin.WORK_DIRECTORY_KEY, path)
}
}
public fun ContextBuilder.workDirectory(path: Path) {
workDirectory(path.toAbsolutePath().toString())
}
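A short sketch of configuring the work directory (the path is illustrative):

```kotlin
import space.kscience.dataforge.context.Context
import space.kscience.dataforge.io.*
import kotlin.io.path.Path

// set a custom work directory while building the context (hypothetical path)
val context = Context("files") {
    plugin(IOPlugin)
    workDirectory(Path("/tmp/dataforge"))
}

// resolved from the plugin meta or the context properties, with ".dataforge" as the fallback
val workDir = context.io.workDirectory
```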

View File

@ -1,6 +1,5 @@
package space.kscience.dataforge.io
import io.ktor.utils.io.core.writeDouble
import space.kscience.dataforge.context.Global
import space.kscience.dataforge.misc.DFExperimental
import java.nio.file.Files

View File

@ -1,6 +1,5 @@
package space.kscience.dataforge.io
import io.ktor.utils.io.core.writeDouble
import space.kscience.dataforge.context.Global
import space.kscience.dataforge.misc.DFExperimental
import java.nio.file.Files

dataforge-meta/README.md Normal file
View File

@ -0,0 +1,23 @@
# Module dataforge-meta
Meta definition and basic operations on meta
## Usage
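A minimal sketch of building and reading a Meta tree (key names below are illustrative):

```kotlin
import space.kscience.dataforge.meta.*

// build an immutable Meta tree
val meta = Meta {
    "device" put {
        "name" put "demo"
        "channels" put 64
    }
}

// read values back with typed accessors
val channels: Int? = meta["device.channels"].int
```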
## Artifact:
The Maven coordinates of this project are `space.kscience:dataforge-meta:0.7.0`.
**Gradle Kotlin DSL:**
```kotlin
repositories {
maven("https://repo.kotlin.link")
//uncomment to access development builds
//maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
mavenCentral()
}
dependencies {
implementation("space.kscience:dataforge-meta:0.7.0")
}
```

View File

@ -7,28 +7,64 @@ public final class space/kscience/dataforge/meta/ConfigurableKt {
public static final fun configure (Lspace/kscience/dataforge/meta/Configurable;Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/Configurable;
}
public final class space/kscience/dataforge/meta/DoubleArrayValue : java/lang/Iterable, kotlin/jvm/internal/markers/KMappedMarker, space/kscience/dataforge/meta/Value {
public fun <init> ([D)V
public fun equals (Ljava/lang/Object;)Z
public fun getList ()Ljava/util/List;
public fun getType ()Lspace/kscience/dataforge/meta/ValueType;
public synthetic fun getValue ()Ljava/lang/Object;
public fun getValue ()[D
public fun hashCode ()I
public fun iterator ()Ljava/util/Iterator;
public fun toString ()Ljava/lang/String;
}
public final class space/kscience/dataforge/meta/EnumValue : space/kscience/dataforge/meta/Value {
public fun <init> (Ljava/lang/Enum;)V
public fun equals (Ljava/lang/Object;)Z
public fun getType ()Lspace/kscience/dataforge/meta/ValueType;
public fun getValue ()Ljava/lang/Enum;
public synthetic fun getValue ()Ljava/lang/Object;
public fun hashCode ()I
public fun toString ()Ljava/lang/String;
}
public final class space/kscience/dataforge/meta/ExoticValuesKt {
public static final fun asValue ([D)Lspace/kscience/dataforge/meta/Value;
public static final fun lazyParseValue (Ljava/lang/String;)Lspace/kscience/dataforge/meta/LazyParsedValue;
}
public final class space/kscience/dataforge/meta/False : space/kscience/dataforge/meta/Value {
public static final field INSTANCE Lspace/kscience/dataforge/meta/False;
public fun equals (Ljava/lang/Object;)Z
public fun getType ()Lspace/kscience/dataforge/meta/ValueType;
public fun getValue ()Ljava/lang/Object;
public fun hashCode ()I
public fun toString ()Ljava/lang/String;
}
public final class space/kscience/dataforge/meta/JsonMetaKt {
public static final fun getJSON_ARRAY_KEY (Lspace/kscience/dataforge/meta/Meta$Companion;)Ljava/lang/String;
public static final fun toJson (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)Lkotlinx/serialization/json/JsonObject;
public static final fun toJson (Lspace/kscience/dataforge/values/Value;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)Lkotlinx/serialization/json/JsonElement;
public static synthetic fun toJson$default (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;ILjava/lang/Object;)Lkotlinx/serialization/json/JsonObject;
public static synthetic fun toJson$default (Lspace/kscience/dataforge/values/Value;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;ILjava/lang/Object;)Lkotlinx/serialization/json/JsonElement;
public static final fun toJson (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)Lkotlinx/serialization/json/JsonElement;
public static final fun toJson (Lspace/kscience/dataforge/meta/Value;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)Lkotlinx/serialization/json/JsonElement;
public static synthetic fun toJson$default (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;ILjava/lang/Object;)Lkotlinx/serialization/json/JsonElement;
public static synthetic fun toJson$default (Lspace/kscience/dataforge/meta/Value;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;ILjava/lang/Object;)Lkotlinx/serialization/json/JsonElement;
public static final fun toMeta (Lkotlinx/serialization/json/JsonElement;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)Lspace/kscience/dataforge/meta/SealedMeta;
public static final fun toMeta (Lkotlinx/serialization/json/JsonObject;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)Lspace/kscience/dataforge/meta/SealedMeta;
public static synthetic fun toMeta$default (Lkotlinx/serialization/json/JsonElement;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/SealedMeta;
public static synthetic fun toMeta$default (Lkotlinx/serialization/json/JsonObject;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/SealedMeta;
public static final fun toValue (Lkotlinx/serialization/json/JsonPrimitive;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)Lspace/kscience/dataforge/values/Value;
public static final fun toValue (Lkotlinx/serialization/json/JsonPrimitive;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)Lspace/kscience/dataforge/meta/Value;
}
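The JSON bridge listed above can be exercised roughly like this; the descriptor argument is left at its default, and note that Meta.toJson now returns a JsonElement rather than a JsonObject:

```kotlin
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.toJson
import space.kscience.dataforge.meta.toMeta

// A sketch of the Meta <-> JSON round trip; the key is illustrative.
val meta = Meta { "x" put 1.0 }
val json = meta.toJson()      // JsonElement in 0.7.0
val restored = json.toMeta()  // SealedMeta
```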
public final class space/kscience/dataforge/meta/Laminate : space/kscience/dataforge/meta/TypedMeta {
public static final field Companion Lspace/kscience/dataforge/meta/Laminate$Companion;
public fun equals (Ljava/lang/Object;)Z
public fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Laminate;
public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Meta;
public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/TypedMeta;
public fun getItems ()Ljava/util/Map;
public final fun getLayers ()Ljava/util/List;
public fun getMeta (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Laminate;
public synthetic fun getMeta (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Meta;
public synthetic fun getMeta (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/TypedMeta;
public fun getValue ()Lspace/kscience/dataforge/values/Value;
public fun getValue ()Lspace/kscience/dataforge/meta/Value;
public fun hashCode ()I
public final fun merge ()Lspace/kscience/dataforge/meta/SealedMeta;
public fun toString ()Ljava/lang/String;
@@ -48,10 +84,36 @@ public final class space/kscience/dataforge/meta/LaminateKt {
public static final fun withTop (Lspace/kscience/dataforge/meta/Laminate;Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/Laminate;
}
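A small illustration of the Laminate members and extensions listed above; the withOverrides helper name is hypothetical:

```kotlin
import space.kscience.dataforge.meta.Laminate
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.SealedMeta
import space.kscience.dataforge.meta.withTop

// Put user overrides on top of an existing Laminate (upper layers shadow lower ones)
// and flatten the result into a single SealedMeta.
fun Laminate.withOverrides(overrides: Meta): SealedMeta = withTop(overrides).merge()
```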
public final class space/kscience/dataforge/meta/LazyParsedValue : space/kscience/dataforge/meta/Value {
public fun <init> (Ljava/lang/String;)V
public fun equals (Ljava/lang/Object;)Z
public final fun getString ()Ljava/lang/String;
public fun getType ()Lspace/kscience/dataforge/meta/ValueType;
public fun getValue ()Ljava/lang/Object;
public fun hashCode ()I
public fun toString ()Ljava/lang/String;
}
public final class space/kscience/dataforge/meta/ListValue : java/lang/Iterable, kotlin/jvm/internal/markers/KMappedMarker, space/kscience/dataforge/meta/Value {
public static final field Companion Lspace/kscience/dataforge/meta/ListValue$Companion;
public fun <init> (Ljava/util/List;)V
public fun equals (Ljava/lang/Object;)Z
public fun getList ()Ljava/util/List;
public fun getType ()Lspace/kscience/dataforge/meta/ValueType;
public synthetic fun getValue ()Ljava/lang/Object;
public fun getValue ()Ljava/util/List;
public fun hashCode ()I
public fun iterator ()Ljava/util/Iterator;
public fun toString ()Ljava/lang/String;
}
public final class space/kscience/dataforge/meta/ListValue$Companion {
public final fun getEMPTY ()Lspace/kscience/dataforge/meta/ListValue;
}
public final class space/kscience/dataforge/meta/MapMetaKt {
public static final fun toMap (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)Ljava/util/Map;
public static synthetic fun toMap$default (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;ILjava/lang/Object;)Ljava/util/Map;
public static synthetic fun toMeta$default (Ljava/util/Map;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/Meta;
}
public abstract interface class space/kscience/dataforge/meta/Meta : space/kscience/dataforge/meta/MetaProvider, space/kscience/dataforge/meta/MetaRepr {
@@ -60,9 +122,9 @@ public abstract interface class space/kscience/dataforge/meta/Meta : space/kscie
public static final field TYPE Ljava/lang/String;
public static final field VALUE_KEY Ljava/lang/String;
public abstract fun equals (Ljava/lang/Object;)Z
public fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Meta;
public abstract fun getItems ()Ljava/util/Map;
public fun getMeta (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Meta;
public abstract fun getValue ()Lspace/kscience/dataforge/values/Value;
public abstract fun getValue ()Lspace/kscience/dataforge/meta/Value;
public abstract fun hashCode ()I
public fun toMeta ()Lspace/kscience/dataforge/meta/Meta;
public abstract fun toString ()Ljava/lang/String;
@@ -75,10 +137,26 @@ public final class space/kscience/dataforge/meta/Meta$Companion {
public final fun equals (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/Meta;)Z
public final fun getEMPTY ()Lspace/kscience/dataforge/meta/Meta;
public final fun hashCode (Lspace/kscience/dataforge/meta/Meta;)I
public final fun serializer ()Lkotlinx/serialization/KSerializer;
public final fun toString (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/String;
}
public abstract interface annotation class space/kscience/dataforge/meta/MetaBuilder : java/lang/annotation/Annotation {
public final class space/kscience/dataforge/meta/MetaBuilder : space/kscience/dataforge/meta/MutableMeta {
public fun <init> ()V
public fun <init> (Lspace/kscience/dataforge/meta/Value;Ljava/util/Map;)V
public synthetic fun <init> (Lspace/kscience/dataforge/meta/Value;Ljava/util/Map;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
public fun equals (Ljava/lang/Object;)Z
public fun getItems ()Ljava/util/Map;
public fun getOrCreate (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaBuilder;
public synthetic fun getOrCreate (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMeta;
public fun getValue ()Lspace/kscience/dataforge/meta/Value;
public fun hashCode ()I
public fun set (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)V
public fun setValue (Lspace/kscience/dataforge/meta/Value;)V
public fun toString ()Ljava/lang/String;
}
public abstract interface annotation class space/kscience/dataforge/meta/MetaBuilderMarker : java/lang/annotation/Annotation {
}
public final class space/kscience/dataforge/meta/MetaDelegateKt {
@@ -136,11 +214,13 @@ public final class space/kscience/dataforge/meta/MetaKt {
public static final fun getBoolean (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Boolean;
public static final fun getDouble (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Double;
public static final fun getFloat (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Float;
public static final fun getIndexed (Lspace/kscience/dataforge/meta/Meta;Ljava/lang/String;)Ljava/util/Map;
public static final fun getIndexed (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/names/Name;)Ljava/util/Map;
public static final fun getIndexed (Lspace/kscience/dataforge/meta/TypedMeta;Ljava/lang/String;)Ljava/util/Map;
public static final fun getIndexed (Lspace/kscience/dataforge/meta/TypedMeta;Lspace/kscience/dataforge/names/Name;)Ljava/util/Map;
public static final fun getInt (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Integer;
public static final fun getLong (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Long;
public static final synthetic fun getNonNullable (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/names/NameToken;)Lspace/kscience/dataforge/meta/Meta;
public static final fun getNumber (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Number;
public static final fun getSelf (Lspace/kscience/dataforge/meta/TypedMeta;)Lspace/kscience/dataforge/meta/TypedMeta;
public static final fun getShort (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Short;
@@ -151,12 +231,12 @@ public final class space/kscience/dataforge/meta/MetaKt {
public static final fun iterator (Lspace/kscience/dataforge/meta/Meta;)Ljava/util/Iterator;
public static final fun nodeSequence (Lspace/kscience/dataforge/meta/Meta;)Lkotlin/sequences/Sequence;
public static final fun valueSequence (Lspace/kscience/dataforge/meta/Meta;)Lkotlin/sequences/Sequence;
public static final fun withDefault (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/Meta;
public static final fun withDefault (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MetaProvider;)Lspace/kscience/dataforge/meta/Meta;
}
public abstract interface class space/kscience/dataforge/meta/MetaProvider : space/kscience/dataforge/values/ValueProvider {
public abstract fun getMeta (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Meta;
public fun getValue (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/values/Value;
public abstract interface class space/kscience/dataforge/meta/MetaProvider : space/kscience/dataforge/meta/ValueProvider {
public abstract fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Meta;
public fun getValue (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Value;
}
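With this change, MetaProvider implementations override get instead of getMeta. A sketch of an implementor; the map-backed provider is an illustrative assumption:

```kotlin
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.MetaProvider
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.asName

// Hypothetical provider that serves Meta nodes from an in-memory map.
class MapMetaProvider(private val items: Map<Name, Meta>) : MetaProvider {
    override fun get(name: Name): Meta? = items[name]
}

val provider = MapMetaProvider(mapOf("config".asName() to Meta { "x" put 1 }))
val config: Meta? = provider.get("config".asName())
```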
public abstract interface class space/kscience/dataforge/meta/MetaRepr {
@@ -173,18 +253,19 @@ public final class space/kscience/dataforge/meta/MetaSerializer : kotlinx/serial
}
public abstract interface class space/kscience/dataforge/meta/MutableMeta : space/kscience/dataforge/meta/Meta, space/kscience/dataforge/meta/MutableMetaProvider {
public static final field Companion Lspace/kscience/dataforge/meta/MutableMeta$Companion;
public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Meta;
public fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMeta;
public abstract fun getItems ()Ljava/util/Map;
public synthetic fun getMeta (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Meta;
public fun getMeta (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMeta;
public abstract fun getOrCreate (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMeta;
public abstract fun getValue ()Lspace/kscience/dataforge/values/Value;
public abstract fun getValue ()Lspace/kscience/dataforge/meta/Value;
public fun put (Ljava/lang/String;Ljava/lang/Enum;)V
public fun put (Ljava/lang/String;Ljava/lang/Number;)V
public fun put (Ljava/lang/String;Ljava/lang/String;)V
public fun put (Ljava/lang/String;Lkotlin/jvm/functions/Function1;)V
public fun put (Ljava/lang/String;Lspace/kscience/dataforge/meta/Meta;)V
public fun put (Ljava/lang/String;Lspace/kscience/dataforge/meta/MetaRepr;)V
public fun put (Ljava/lang/String;Lspace/kscience/dataforge/values/Value;)V
public fun put (Ljava/lang/String;Lspace/kscience/dataforge/meta/Value;)V
public fun put (Ljava/lang/String;Z)V
public fun put (Ljava/lang/String;[D)V
public fun put (Lspace/kscience/dataforge/names/Name;Ljava/lang/Enum;)V
@@ -193,12 +274,16 @@ public abstract interface class space/kscience/dataforge/meta/MutableMeta : spac
public fun put (Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;)V
public fun put (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)V
public fun put (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/MetaRepr;)V
public fun put (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/values/Value;)V
public fun put (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Value;)V
public fun put (Lspace/kscience/dataforge/names/Name;Z)V
public fun putIndexed (Ljava/lang/String;Ljava/lang/Iterable;)V
public fun putIndexed (Lspace/kscience/dataforge/names/Name;Ljava/lang/Iterable;)V
public fun setValue (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/values/Value;)V
public abstract fun setValue (Lspace/kscience/dataforge/values/Value;)V
public abstract fun setValue (Lspace/kscience/dataforge/meta/Value;)V
public fun setValue (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Value;)V
}
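A sketch of mutating a MutableMeta through the members and extensions listed above; keys and values are illustrative:

```kotlin
import space.kscience.dataforge.meta.MutableMeta
import space.kscience.dataforge.meta.set

// Build a mutable node, then mutate it through member and extension overloads.
val mutable = MutableMeta {
    "target" put "detector"
}
mutable.put("enabled", true)   // member overload for booleans
mutable.set("scale", 2.0)      // extension overload for numbers
```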
public final class space/kscience/dataforge/meta/MutableMeta$Companion {
public final fun serializer ()Lkotlinx/serialization/KSerializer;
}
public final class space/kscience/dataforge/meta/MutableMetaDelegateKt {
@@ -257,13 +342,13 @@ public final class space/kscience/dataforge/meta/MutableMetaDelegateKt {
}
public final class space/kscience/dataforge/meta/MutableMetaKt {
public static final fun MutableMeta ()Lspace/kscience/dataforge/meta/ObservableMutableMeta;
public static final fun MutableMeta (Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/ObservableMutableMeta;
public static synthetic fun MutableMeta$default (Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/ObservableMutableMeta;
public static final fun ObservableMutableMeta ()Lspace/kscience/dataforge/meta/ObservableMutableMeta;
public static final fun ObservableMutableMeta (Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/ObservableMutableMeta;
public static synthetic fun ObservableMutableMeta$default (Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/ObservableMutableMeta;
public static final fun append (Lspace/kscience/dataforge/meta/MutableMeta;Ljava/lang/String;Lspace/kscience/dataforge/meta/Meta;)V
public static final fun append (Lspace/kscience/dataforge/meta/MutableMeta;Ljava/lang/String;Lspace/kscience/dataforge/values/Value;)V
public static final fun append (Lspace/kscience/dataforge/meta/MutableMeta;Ljava/lang/String;Lspace/kscience/dataforge/meta/Value;)V
public static final fun append (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)V
public static final fun append (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/values/Value;)V
public static final fun append (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Value;)V
public static final fun asMutableMeta (Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/MutableMeta;
public static final fun copy (Lspace/kscience/dataforge/meta/Meta;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/Meta;
public static synthetic fun copy$default (Lspace/kscience/dataforge/meta/Meta;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/Meta;
@@ -272,38 +357,23 @@ public final class space/kscience/dataforge/meta/MutableMetaKt {
public static final fun getOrCreate (Lspace/kscience/dataforge/meta/MutableTypedMeta;Ljava/lang/String;)Lspace/kscience/dataforge/meta/MutableTypedMeta;
public static final fun remove (Lspace/kscience/dataforge/meta/MutableMetaProvider;Ljava/lang/String;)V
public static final fun remove (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableMeta;Ljava/lang/String;Ljava/lang/Iterable;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableMeta;Ljava/lang/String;Ljava/lang/Number;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableMeta;Ljava/lang/String;Ljava/lang/String;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableMeta;Ljava/lang/String;Ljava/util/List;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableMeta;Ljava/lang/String;Lspace/kscience/dataforge/values/Value;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableMeta;Ljava/lang/String;Z)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/names/Name;Ljava/lang/Iterable;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/names/Name;Ljava/lang/Number;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/names/Name;Ljava/lang/String;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/names/Name;Ljava/util/List;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/values/Value;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/names/Name;Z)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/names/NameToken;Ljava/lang/Number;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/names/NameToken;Ljava/lang/String;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/names/NameToken;Ljava/util/List;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/names/NameToken;Lspace/kscience/dataforge/values/Value;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/names/NameToken;Z)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableMetaProvider;Ljava/lang/String;Ljava/lang/Iterable;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableMetaProvider;Ljava/lang/String;Lspace/kscience/dataforge/meta/Meta;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Ljava/lang/Iterable;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/NameToken;Lspace/kscience/dataforge/meta/Meta;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableTypedMeta;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/values/Value;)V
public static final fun setIndexed (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/names/Name;Ljava/lang/Iterable;Lkotlin/jvm/functions/Function2;)V
public static synthetic fun setIndexed$default (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/names/Name;Ljava/lang/Iterable;Lkotlin/jvm/functions/Function2;ILjava/lang/Object;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableTypedMeta;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Value;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableValueProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Value;)V
public static final fun setIndexed (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Ljava/lang/Iterable;Lkotlin/jvm/functions/Function2;)V
public static synthetic fun setIndexed$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Ljava/lang/Iterable;Lkotlin/jvm/functions/Function2;ILjava/lang/Object;)V
public static final fun toMutableMeta (Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/ObservableMutableMeta;
public static final fun update (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/Meta;)V
public static final fun withDefault (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/MutableMeta;
public static final fun update (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/meta/Meta;)V
public static final fun withDefault (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/MetaProvider;)Lspace/kscience/dataforge/meta/MutableMeta;
}
public abstract interface class space/kscience/dataforge/meta/MutableMetaProvider : space/kscience/dataforge/meta/MetaProvider, space/kscience/dataforge/values/MutableValueProvider {
public abstract fun getMeta (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMeta;
public abstract fun setMeta (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)V
public fun setValue (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/values/Value;)V
public abstract interface class space/kscience/dataforge/meta/MutableMetaProvider : space/kscience/dataforge/meta/MetaProvider, space/kscience/dataforge/meta/MutableValueProvider {
public abstract fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMeta;
public abstract fun set (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)V
public abstract fun setValue (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Value;)V
}
public final class space/kscience/dataforge/meta/MutableMetaSerializer : kotlinx/serialization/KSerializer {
@@ -316,10 +386,33 @@ public final class space/kscience/dataforge/meta/MutableMetaSerializer : kotlinx
}
public abstract interface class space/kscience/dataforge/meta/MutableTypedMeta : space/kscience/dataforge/meta/MutableMeta, space/kscience/dataforge/meta/TypedMeta {
public abstract fun getMeta (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableTypedMeta;
public abstract fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableTypedMeta;
public abstract fun getOrCreate (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableTypedMeta;
}
public abstract interface class space/kscience/dataforge/meta/MutableValueProvider : space/kscience/dataforge/meta/ValueProvider {
public abstract fun setValue (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Value;)V
}
public final class space/kscience/dataforge/meta/Null : space/kscience/dataforge/meta/Value {
public static final field INSTANCE Lspace/kscience/dataforge/meta/Null;
public fun equals (Ljava/lang/Object;)Z
public fun getType ()Lspace/kscience/dataforge/meta/ValueType;
public fun getValue ()Ljava/lang/Object;
public fun hashCode ()I
public fun toString ()Ljava/lang/String;
}
public final class space/kscience/dataforge/meta/NumberValue : space/kscience/dataforge/meta/Value {
public fun <init> (Ljava/lang/Number;)V
public fun equals (Ljava/lang/Object;)Z
public final fun getNumber ()Ljava/lang/Number;
public fun getType ()Lspace/kscience/dataforge/meta/ValueType;
public fun getValue ()Ljava/lang/Object;
public fun hashCode ()I
public fun toString ()Ljava/lang/String;
}
public abstract interface class space/kscience/dataforge/meta/ObservableMeta : space/kscience/dataforge/meta/Meta {
public abstract fun invalidate (Lspace/kscience/dataforge/names/Name;)V
public abstract fun onChange (Ljava/lang/Object;Lkotlin/jvm/functions/Function2;)V
@@ -336,15 +429,15 @@ public final class space/kscience/dataforge/meta/ObservableMetaWrapperKt {
}
public abstract interface class space/kscience/dataforge/meta/ObservableMutableMeta : space/kscience/dataforge/meta/MutableMeta, space/kscience/dataforge/meta/MutableTypedMeta, space/kscience/dataforge/meta/ObservableMeta {
public synthetic fun getMeta (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Meta;
public synthetic fun getMeta (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMeta;
public synthetic fun getMeta (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableTypedMeta;
public fun getMeta (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/ObservableMutableMeta;
public synthetic fun getMeta (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/TypedMeta;
public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Meta;
public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMeta;
public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableTypedMeta;
public fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/ObservableMutableMeta;
public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/TypedMeta;
public abstract fun getOrCreate (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/ObservableMutableMeta;
}
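A sketch of subscribing to changes via the ObservableMeta API above; the listener signature (a Meta receiver plus the changed Name) and the null owner are assumptions:

```kotlin
import space.kscience.dataforge.meta.ObservableMutableMeta

// Illustrative only: create an observable node, listen for mutations, then mutate it.
val observable = ObservableMutableMeta()
observable.onChange(null) { name ->
    println("Meta node changed: $name")
}
observable.put("status", "ready")
```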
public abstract interface class space/kscience/dataforge/meta/ReadOnlySpecification {
public abstract interface class space/kscience/dataforge/meta/ReadOnlySpecification : space/kscience/dataforge/meta/descriptors/Described {
public abstract fun empty ()Ljava/lang/Object;
public fun invoke (Lkotlin/jvm/functions/Function1;)Ljava/lang/Object;
public abstract fun read (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
@@ -352,24 +445,26 @@ public abstract interface class space/kscience/dataforge/meta/ReadOnlySpecificat
public class space/kscience/dataforge/meta/Scheme : space/kscience/dataforge/meta/Configurable, space/kscience/dataforge/meta/MetaRepr, space/kscience/dataforge/meta/MutableMetaProvider, space/kscience/dataforge/meta/descriptors/Described {
public fun <init> ()V
public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Meta;
public fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMeta;
public final fun getDescriptor ()Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
public synthetic fun getMeta ()Lspace/kscience/dataforge/meta/MutableMeta;
public final fun getMeta ()Lspace/kscience/dataforge/meta/ObservableMutableMeta;
public synthetic fun getMeta (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Meta;
public fun getMeta (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMeta;
public fun setMeta (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)V
public fun setValue (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/values/Value;)V
public fun set (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)V
public fun setValue (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Value;)V
public fun toMeta ()Lspace/kscience/dataforge/meta/Laminate;
public synthetic fun toMeta ()Lspace/kscience/dataforge/meta/Meta;
public fun validate (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)Z
}
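A sketch of the usual Scheme/SchemeSpec pairing behind this API; the property names, defaults, and delegate helpers are assumptions based on common dataforge usage:

```kotlin
import space.kscience.dataforge.meta.Scheme
import space.kscience.dataforge.meta.SchemeSpec
import space.kscience.dataforge.meta.int
import space.kscience.dataforge.meta.string

// Hypothetical scheme with delegated, meta-backed properties.
class AxisScheme : Scheme() {
    var title: String? by string()
    var points: Int by int(100)

    companion object : SchemeSpec<AxisScheme>(::AxisScheme)
}

val axis = AxisScheme {
    title = "x"
    points = 200
}
```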
public final class space/kscience/dataforge/meta/SchemeKt {
public static final fun copy (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/Scheme;
public static synthetic fun copy$default (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/Scheme;
public static final fun invoke (Lspace/kscience/dataforge/meta/Scheme;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/Scheme;
public static final fun retarget (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/MutableMeta;)Lspace/kscience/dataforge/meta/Scheme;
}
public class space/kscience/dataforge/meta/SchemeSpec : space/kscience/dataforge/meta/Specification, space/kscience/dataforge/meta/descriptors/Described {
public class space/kscience/dataforge/meta/SchemeSpec : space/kscience/dataforge/meta/Specification {
public fun <init> (Lkotlin/jvm/functions/Function0;)V
public synthetic fun empty ()Ljava/lang/Object;
public fun empty ()Lspace/kscience/dataforge/meta/Scheme;
@@ -384,18 +479,16 @@ public class space/kscience/dataforge/meta/SchemeSpec : space/kscience/dataforge
public final class space/kscience/dataforge/meta/SealedMeta : space/kscience/dataforge/meta/TypedMeta {
public static final field Companion Lspace/kscience/dataforge/meta/SealedMeta$Companion;
public synthetic fun <init> (ILspace/kscience/dataforge/values/Value;Ljava/util/Map;Lkotlinx/serialization/internal/SerializationConstructorMarker;)V
public fun <init> (Lspace/kscience/dataforge/meta/Value;Ljava/util/Map;)V
public fun equals (Ljava/lang/Object;)Z
public fun getItems ()Ljava/util/Map;
public fun getValue ()Lspace/kscience/dataforge/values/Value;
public fun getValue ()Lspace/kscience/dataforge/meta/Value;
public fun hashCode ()I
public fun toString ()Ljava/lang/String;
public static final fun write$Self (Lspace/kscience/dataforge/meta/SealedMeta;Lkotlinx/serialization/encoding/CompositeEncoder;Lkotlinx/serialization/descriptors/SerialDescriptor;)V
}
public final class space/kscience/dataforge/meta/SealedMeta$$serializer : kotlinx/serialization/internal/GeneratedSerializer {
public static final field INSTANCE Lspace/kscience/dataforge/meta/SealedMeta$$serializer;
public static final synthetic field descriptor Lkotlinx/serialization/descriptors/SerialDescriptor;
public fun childSerializers ()[Lkotlinx/serialization/KSerializer;
public synthetic fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Ljava/lang/Object;
public fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Lspace/kscience/dataforge/meta/SealedMeta;
@@ -412,9 +505,13 @@ public final class space/kscience/dataforge/meta/SealedMeta$Companion {
public final class space/kscience/dataforge/meta/SealedMetaKt {
public static final fun Meta (Ljava/lang/Number;)Lspace/kscience/dataforge/meta/SealedMeta;
public static final fun Meta (Ljava/lang/String;)Lspace/kscience/dataforge/meta/SealedMeta;
public static final fun Meta (Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/SealedMeta;
public static final fun Meta (Lspace/kscience/dataforge/values/Value;)Lspace/kscience/dataforge/meta/SealedMeta;
public static final fun Meta (Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/Meta;
public static final fun Meta (Lspace/kscience/dataforge/meta/Value;)Lspace/kscience/dataforge/meta/SealedMeta;
public static final fun Meta (Z)Lspace/kscience/dataforge/meta/SealedMeta;
public static final fun MutableMeta ()Lspace/kscience/dataforge/meta/MutableMeta;
public static final fun MutableMeta (Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/MutableMeta;
public static synthetic fun MutableMeta$default (Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMeta;
public static final fun SealedMeta (Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/SealedMeta;
public static final fun seal (Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/SealedMeta;
}
@@ -423,22 +520,151 @@ public abstract interface class space/kscience/dataforge/meta/Specification : sp
}
public final class space/kscience/dataforge/meta/SpecificationKt {
public static synthetic fun listOfSpec$default (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/Specification;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
public static final fun spec (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/Specification;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
public static final fun spec (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/Specification;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
public static synthetic fun spec$default (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/Specification;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
public static synthetic fun spec$default (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/Specification;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
public static final fun specOrNull (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/Specification;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
public static final fun specOrNull (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/Specification;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
public static synthetic fun specOrNull$default (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/Specification;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
public static synthetic fun specOrNull$default (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/Specification;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
public static final fun updateWith (Lspace/kscience/dataforge/meta/Configurable;Lspace/kscience/dataforge/meta/Specification;Lkotlin/jvm/functions/Function1;)Ljava/lang/Object;
public static final fun updateWith (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/Specification;Lkotlin/jvm/functions/Function1;)Ljava/lang/Object;
}
public final class space/kscience/dataforge/meta/StringValue : space/kscience/dataforge/meta/Value {
public static final synthetic fun box-impl (Ljava/lang/String;)Lspace/kscience/dataforge/meta/StringValue;
public static fun constructor-impl (Ljava/lang/String;)Ljava/lang/String;
public fun equals (Ljava/lang/Object;)Z
public static fun equals-impl (Ljava/lang/String;Ljava/lang/Object;)Z
public static final fun equals-impl0 (Ljava/lang/String;Ljava/lang/String;)Z
public final fun getString ()Ljava/lang/String;
public fun getType ()Lspace/kscience/dataforge/meta/ValueType;
public static fun getType-impl (Ljava/lang/String;)Lspace/kscience/dataforge/meta/ValueType;
public fun getValue ()Ljava/lang/Object;
public static fun getValue-impl (Ljava/lang/String;)Ljava/lang/Object;
public fun hashCode ()I
public static fun hashCode-impl (Ljava/lang/String;)I
public fun toString ()Ljava/lang/String;
public static fun toString-impl (Ljava/lang/String;)Ljava/lang/String;
public final synthetic fun unbox-impl ()Ljava/lang/String;
}
public final class space/kscience/dataforge/meta/True : space/kscience/dataforge/meta/Value {
public static final field INSTANCE Lspace/kscience/dataforge/meta/True;
public fun equals (Ljava/lang/Object;)Z
public fun getType ()Lspace/kscience/dataforge/meta/ValueType;
public fun getValue ()Ljava/lang/Object;
public fun hashCode ()I
public fun toString ()Ljava/lang/String;
}
public abstract interface class space/kscience/dataforge/meta/TypedMeta : space/kscience/dataforge/meta/Meta {
public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Meta;
public fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/TypedMeta;
public abstract fun getItems ()Ljava/util/Map;
public synthetic fun getMeta (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Meta;
public fun getMeta (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/TypedMeta;
public fun toMeta ()Lspace/kscience/dataforge/meta/Meta;
}
public abstract interface class space/kscience/dataforge/meta/Value {
public static final field Companion Lspace/kscience/dataforge/meta/Value$Companion;
public static final field TYPE Ljava/lang/String;
public abstract fun equals (Ljava/lang/Object;)Z
public fun getList ()Ljava/util/List;
public abstract fun getType ()Lspace/kscience/dataforge/meta/ValueType;
public abstract fun getValue ()Ljava/lang/Object;
public abstract fun hashCode ()I
public abstract fun toString ()Ljava/lang/String;
}
public final class space/kscience/dataforge/meta/Value$Companion {
public static final field TYPE Ljava/lang/String;
public final fun of (Ljava/lang/Object;)Lspace/kscience/dataforge/meta/Value;
public final fun parse (Ljava/lang/String;)Lspace/kscience/dataforge/meta/Value;
public final fun serializer ()Lkotlinx/serialization/KSerializer;
}
public final class space/kscience/dataforge/meta/ValueExtensionsKt {
public static final fun getBoolean (Lspace/kscience/dataforge/meta/Value;)Z
public static final fun getDouble (Lspace/kscience/dataforge/meta/Value;)D
public static final fun getDoubleArray (Lspace/kscience/dataforge/meta/Value;)[D
public static final fun getFloat (Lspace/kscience/dataforge/meta/Value;)F
public static final fun getInt (Lspace/kscience/dataforge/meta/Value;)I
public static final fun getLong (Lspace/kscience/dataforge/meta/Value;)J
public static final fun getShort (Lspace/kscience/dataforge/meta/Value;)S
public static final fun getStringList (Lspace/kscience/dataforge/meta/Value;)Ljava/util/List;
public static final fun isList (Lspace/kscience/dataforge/meta/Value;)Z
public static final fun isNull (Lspace/kscience/dataforge/meta/Value;)Z
public static final fun toMeta (Lspace/kscience/dataforge/meta/Value;)Lspace/kscience/dataforge/meta/Meta;
}
public final class space/kscience/dataforge/meta/ValueKt {
public static final fun ListValue ([Ljava/lang/Number;)Lspace/kscience/dataforge/meta/ListValue;
public static final fun ListValue ([Ljava/lang/String;)Lspace/kscience/dataforge/meta/ListValue;
public static final fun asValue (Ljava/lang/Enum;)Lspace/kscience/dataforge/meta/Value;
public static final fun asValue (Ljava/lang/Iterable;)Lspace/kscience/dataforge/meta/Value;
public static final fun asValue (Ljava/lang/Number;)Lspace/kscience/dataforge/meta/Value;
public static final fun asValue (Ljava/lang/String;)Lspace/kscience/dataforge/meta/Value;
public static final fun asValue (Z)Lspace/kscience/dataforge/meta/Value;
public static final fun asValue ([B)Lspace/kscience/dataforge/meta/Value;
public static final fun asValue ([F)Lspace/kscience/dataforge/meta/Value;
public static final fun asValue ([I)Lspace/kscience/dataforge/meta/Value;
public static final fun asValue ([J)Lspace/kscience/dataforge/meta/Value;
public static final fun asValue ([S)Lspace/kscience/dataforge/meta/Value;
public static final fun getNumber (Lspace/kscience/dataforge/meta/Value;)Ljava/lang/Number;
public static final fun getNumberOrNull (Lspace/kscience/dataforge/meta/Value;)Ljava/lang/Number;
public static final fun getString (Lspace/kscience/dataforge/meta/Value;)Ljava/lang/String;
public static final fun parseValue (Ljava/lang/String;)Lspace/kscience/dataforge/meta/Value;
}
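A sketch of creating and reading Values with the factories above; Value.parse from the companion is the replacement for the now-deprecated String.parseValue:

```kotlin
import space.kscience.dataforge.meta.Value
import space.kscience.dataforge.meta.ValueType
import space.kscience.dataforge.meta.asValue
import space.kscience.dataforge.meta.int

// Illustrative values only.
val number = 42.asValue()        // NumberValue
val flag = true.asValue()        // True
val parsed = Value.parse("3.14") // parsed into a numeric Value
check(parsed.type == ValueType.NUMBER)
val i: Int = number.int          // accessor from ValueExtensionsKt
```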
public abstract interface class space/kscience/dataforge/meta/ValueProvider {
public abstract fun getValue (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Value;
}
public final class space/kscience/dataforge/meta/ValueProviderKt {
public static final fun getValue (Lspace/kscience/dataforge/meta/ValueProvider;Ljava/lang/String;)Lspace/kscience/dataforge/meta/Value;
public static final fun set (Lspace/kscience/dataforge/meta/MutableValueProvider;Ljava/lang/String;Ljava/lang/Number;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableValueProvider;Ljava/lang/String;Ljava/lang/String;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableValueProvider;Ljava/lang/String;Ljava/util/List;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableValueProvider;Ljava/lang/String;Lspace/kscience/dataforge/meta/Value;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableValueProvider;Ljava/lang/String;Z)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableValueProvider;Lspace/kscience/dataforge/names/Name;Ljava/lang/Number;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableValueProvider;Lspace/kscience/dataforge/names/Name;Ljava/lang/String;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableValueProvider;Lspace/kscience/dataforge/names/Name;Ljava/util/List;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableValueProvider;Lspace/kscience/dataforge/names/Name;Z)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableValueProvider;Lspace/kscience/dataforge/names/NameToken;Ljava/lang/Number;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableValueProvider;Lspace/kscience/dataforge/names/NameToken;Ljava/lang/String;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableValueProvider;Lspace/kscience/dataforge/names/NameToken;Ljava/util/List;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableValueProvider;Lspace/kscience/dataforge/names/NameToken;Lspace/kscience/dataforge/meta/Value;)V
public static final fun set (Lspace/kscience/dataforge/meta/MutableValueProvider;Lspace/kscience/dataforge/names/NameToken;Z)V
public static final fun setValue (Lspace/kscience/dataforge/meta/MutableValueProvider;Ljava/lang/String;Lspace/kscience/dataforge/meta/Value;)V
}
public final class space/kscience/dataforge/meta/ValueSerializer : kotlinx/serialization/KSerializer {
public static final field INSTANCE Lspace/kscience/dataforge/meta/ValueSerializer;
public synthetic fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Ljava/lang/Object;
public fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Lspace/kscience/dataforge/meta/Value;
public fun getDescriptor ()Lkotlinx/serialization/descriptors/SerialDescriptor;
public synthetic fun serialize (Lkotlinx/serialization/encoding/Encoder;Ljava/lang/Object;)V
public fun serialize (Lkotlinx/serialization/encoding/Encoder;Lspace/kscience/dataforge/meta/Value;)V
}
public final class space/kscience/dataforge/meta/ValueType : java/lang/Enum {
public static final field BOOLEAN Lspace/kscience/dataforge/meta/ValueType;
public static final field Companion Lspace/kscience/dataforge/meta/ValueType$Companion;
public static final field LIST Lspace/kscience/dataforge/meta/ValueType;
public static final field NULL Lspace/kscience/dataforge/meta/ValueType;
public static final field NUMBER Lspace/kscience/dataforge/meta/ValueType;
public static final field STRING Lspace/kscience/dataforge/meta/ValueType;
public static fun getEntries ()Lkotlin/enums/EnumEntries;
public static fun valueOf (Ljava/lang/String;)Lspace/kscience/dataforge/meta/ValueType;
public static fun values ()[Lspace/kscience/dataforge/meta/ValueType;
}
public final class space/kscience/dataforge/meta/ValueType$Companion {
public final fun serializer ()Lkotlinx/serialization/KSerializer;
}
public abstract interface class space/kscience/dataforge/meta/descriptors/Described {
public abstract fun getDescriptor ()Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
}
@@ -446,37 +672,34 @@ public abstract interface class space/kscience/dataforge/meta/descriptors/Descri
public final class space/kscience/dataforge/meta/descriptors/MetaDescriptor {
public static final field Companion Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor$Companion;
public fun <init> ()V
public synthetic fun <init> (ILjava/lang/String;Ljava/util/Map;ZLspace/kscience/dataforge/meta/descriptors/ValueRequirement;Ljava/util/List;Ljava/lang/String;Lspace/kscience/dataforge/values/Value;Lspace/kscience/dataforge/meta/Meta;Lkotlinx/serialization/internal/SerializationConstructorMarker;)V
public fun <init> (Ljava/lang/String;Ljava/util/Map;ZLspace/kscience/dataforge/meta/descriptors/ValueRequirement;Ljava/util/List;Ljava/lang/String;Lspace/kscience/dataforge/values/Value;Lspace/kscience/dataforge/meta/Meta;)V
public synthetic fun <init> (Ljava/lang/String;Ljava/util/Map;ZLspace/kscience/dataforge/meta/descriptors/ValueRequirement;Ljava/util/List;Ljava/lang/String;Lspace/kscience/dataforge/values/Value;Lspace/kscience/dataforge/meta/Meta;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
public fun <init> (Ljava/lang/String;Ljava/util/Map;ZLspace/kscience/dataforge/meta/descriptors/ValueRestriction;Ljava/util/List;Ljava/lang/String;Lspace/kscience/dataforge/meta/Value;Lspace/kscience/dataforge/meta/Meta;)V
public synthetic fun <init> (Ljava/lang/String;Ljava/util/Map;ZLspace/kscience/dataforge/meta/descriptors/ValueRestriction;Ljava/util/List;Ljava/lang/String;Lspace/kscience/dataforge/meta/Value;Lspace/kscience/dataforge/meta/Meta;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
public final fun component1 ()Ljava/lang/String;
public final fun component2 ()Ljava/util/Map;
public final fun component3 ()Z
public final fun component4 ()Lspace/kscience/dataforge/meta/descriptors/ValueRequirement;
public final fun component4 ()Lspace/kscience/dataforge/meta/descriptors/ValueRestriction;
public final fun component5 ()Ljava/util/List;
public final fun component6 ()Ljava/lang/String;
public final fun component7 ()Lspace/kscience/dataforge/values/Value;
public final fun component7 ()Lspace/kscience/dataforge/meta/Value;
public final fun component8 ()Lspace/kscience/dataforge/meta/Meta;
public final fun copy (Ljava/lang/String;Ljava/util/Map;ZLspace/kscience/dataforge/meta/descriptors/ValueRequirement;Ljava/util/List;Ljava/lang/String;Lspace/kscience/dataforge/values/Value;Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
public static synthetic fun copy$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;Ljava/lang/String;Ljava/util/Map;ZLspace/kscience/dataforge/meta/descriptors/ValueRequirement;Ljava/util/List;Ljava/lang/String;Lspace/kscience/dataforge/values/Value;Lspace/kscience/dataforge/meta/Meta;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
public final fun copy (Ljava/lang/String;Ljava/util/Map;ZLspace/kscience/dataforge/meta/descriptors/ValueRestriction;Ljava/util/List;Ljava/lang/String;Lspace/kscience/dataforge/meta/Value;Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
public static synthetic fun copy$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;Ljava/lang/String;Ljava/util/Map;ZLspace/kscience/dataforge/meta/descriptors/ValueRestriction;Ljava/util/List;Ljava/lang/String;Lspace/kscience/dataforge/meta/Value;Lspace/kscience/dataforge/meta/Meta;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
public fun equals (Ljava/lang/Object;)Z
public final fun getAttributes ()Lspace/kscience/dataforge/meta/Meta;
public final fun getChildren ()Ljava/util/Map;
public final fun getDefaultNode ()Lspace/kscience/dataforge/meta/Meta;
public final fun getDefaultValue ()Lspace/kscience/dataforge/values/Value;
public final fun getDefaultValue ()Lspace/kscience/dataforge/meta/Value;
public final fun getDescription ()Ljava/lang/String;
public final fun getIndexKey ()Ljava/lang/String;
public final fun getInfo ()Ljava/lang/String;
public final fun getMultiple ()Z
public final fun getValueRequirement ()Lspace/kscience/dataforge/meta/descriptors/ValueRequirement;
public final fun getValueRestriction ()Lspace/kscience/dataforge/meta/descriptors/ValueRestriction;
public final fun getValueTypes ()Ljava/util/List;
public fun hashCode ()I
public fun toString ()Ljava/lang/String;
public static final fun write$Self (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;Lkotlinx/serialization/encoding/CompositeEncoder;Lkotlinx/serialization/descriptors/SerialDescriptor;)V
}
public final class space/kscience/dataforge/meta/descriptors/MetaDescriptor$$serializer : kotlinx/serialization/internal/GeneratedSerializer {
public static final field INSTANCE Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor$$serializer;
public static final synthetic field descriptor Lkotlinx/serialization/descriptors/SerialDescriptor;
public fun childSerializers ()[Lkotlinx/serialization/KSerializer;
public synthetic fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Ljava/lang/Object;
public fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
@@ -487,22 +710,25 @@ public final class space/kscience/dataforge/meta/descriptors/MetaDescriptor$$ser
}
public final class space/kscience/dataforge/meta/descriptors/MetaDescriptor$Companion {
public final fun getEMPTY ()Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
public final fun serializer ()Lkotlinx/serialization/KSerializer;
}
public final class space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder {
public fun <init> ()V
public final fun allowedValues ([Ljava/lang/Object;)V
public final fun attributes (Lkotlin/jvm/functions/Function1;)V
public final fun build ()Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
public final fun default (Ljava/lang/Object;)V
public final fun getAllowedValues ()Ljava/util/List;
public final fun getAttributes ()Lspace/kscience/dataforge/meta/MutableMeta;
public final fun getChildren ()Ljava/util/Map;
public final fun getDefault ()Lspace/kscience/dataforge/values/Value;
public final fun getDefault ()Lspace/kscience/dataforge/meta/Value;
public final fun getIndexKey ()Ljava/lang/String;
public final fun getInfo ()Ljava/lang/String;
public final fun getMultiple ()Z
public final fun getType ()Ljava/util/List;
public final fun getValueRequirement ()Lspace/kscience/dataforge/meta/descriptors/ValueRequirement;
public final fun getValueRestriction ()Lspace/kscience/dataforge/meta/descriptors/ValueRestriction;
public final fun getValueTypes ()Ljava/util/List;
public final fun item (Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
public static synthetic fun item$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
public final fun node (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
@@ -510,13 +736,13 @@ public final class space/kscience/dataforge/meta/descriptors/MetaDescriptorBuild
public final fun setAllowedValues (Ljava/util/List;)V
public final fun setAttributes (Lspace/kscience/dataforge/meta/MutableMeta;)V
public final fun setChildren (Ljava/util/Map;)V
public final fun setDefault (Lspace/kscience/dataforge/values/Value;)V
public final fun setDefault (Lspace/kscience/dataforge/meta/Value;)V
public final fun setIndexKey (Ljava/lang/String;)V
public final fun setInfo (Ljava/lang/String;)V
public final fun setMultiple (Z)V
public final fun setType (Ljava/util/List;)V
public final fun setValueRequirement (Lspace/kscience/dataforge/meta/descriptors/ValueRequirement;)V
public final fun type (Lspace/kscience/dataforge/values/ValueType;[Lspace/kscience/dataforge/values/ValueType;)V
public final fun setValueRestriction (Lspace/kscience/dataforge/meta/descriptors/ValueRestriction;)V
public final fun setValueTypes (Ljava/util/List;)V
public final fun valueType (Lspace/kscience/dataforge/meta/ValueType;[Lspace/kscience/dataforge/meta/ValueType;)V
}
public final class space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilderKt {
@@ -529,10 +755,10 @@ public final class space/kscience/dataforge/meta/descriptors/MetaDescriptorBuild
public static final fun node (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
public static synthetic fun node$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Ljava/lang/String;Lspace/kscience/dataforge/meta/descriptors/Described;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
public static final fun required (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;)V
public static final fun value (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Ljava/lang/String;Lspace/kscience/dataforge/values/ValueType;[Lspace/kscience/dataforge/values/ValueType;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
public static final fun value (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/values/ValueType;[Lspace/kscience/dataforge/values/ValueType;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
public static synthetic fun value$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Ljava/lang/String;Lspace/kscience/dataforge/values/ValueType;[Lspace/kscience/dataforge/values/ValueType;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
public static synthetic fun value$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/values/ValueType;[Lspace/kscience/dataforge/values/ValueType;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
public static final fun value (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Ljava/lang/String;Lspace/kscience/dataforge/meta/ValueType;[Lspace/kscience/dataforge/meta/ValueType;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
public static final fun value (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/ValueType;[Lspace/kscience/dataforge/meta/ValueType;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
public static synthetic fun value$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Ljava/lang/String;Lspace/kscience/dataforge/meta/ValueType;[Lspace/kscience/dataforge/meta/ValueType;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
public static synthetic fun value$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/ValueType;[Lspace/kscience/dataforge/meta/ValueType;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
}
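A sketch of the descriptor DSL after the renames above (valueType, ValueRestriction); the top-level MetaDescriptor builder function and the key names are assumptions for illustration:

```kotlin
import space.kscience.dataforge.meta.ValueType
import space.kscience.dataforge.meta.descriptors.MetaDescriptor
import space.kscience.dataforge.meta.descriptors.required
import space.kscience.dataforge.meta.descriptors.value

// Describe two hypothetical values: a required string and a number with a default.
val descriptor = MetaDescriptor {
    value("name", ValueType.STRING) {
        info = "Human-readable name"
        required()
    }
    value("scale", ValueType.NUMBER) {
        default(1.0)
    }
}
```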
public final class space/kscience/dataforge/meta/descriptors/MetaDescriptorKt {
@@ -541,15 +767,16 @@ public final class space/kscience/dataforge/meta/descriptors/MetaDescriptorKt {
public static final fun getAllowedValues (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)Ljava/util/List;
public static final fun getRequired (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)Z
public static final fun validate (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;Lspace/kscience/dataforge/meta/Meta;)Z
public static final fun validate (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;Lspace/kscience/dataforge/values/Value;)Z
public static final fun validate (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;Lspace/kscience/dataforge/meta/Value;)Z
}
public final class space/kscience/dataforge/meta/descriptors/ValueRequirement : java/lang/Enum {
public static final field ABSENT Lspace/kscience/dataforge/meta/descriptors/ValueRequirement;
public static final field NONE Lspace/kscience/dataforge/meta/descriptors/ValueRequirement;
public static final field REQUIRED Lspace/kscience/dataforge/meta/descriptors/ValueRequirement;
public static fun valueOf (Ljava/lang/String;)Lspace/kscience/dataforge/meta/descriptors/ValueRequirement;
public static fun values ()[Lspace/kscience/dataforge/meta/descriptors/ValueRequirement;
public final class space/kscience/dataforge/meta/descriptors/ValueRestriction : java/lang/Enum {
public static final field ABSENT Lspace/kscience/dataforge/meta/descriptors/ValueRestriction;
public static final field NONE Lspace/kscience/dataforge/meta/descriptors/ValueRestriction;
public static final field REQUIRED Lspace/kscience/dataforge/meta/descriptors/ValueRestriction;
public static fun getEntries ()Lkotlin/enums/EnumEntries;
public static fun valueOf (Ljava/lang/String;)Lspace/kscience/dataforge/meta/descriptors/ValueRestriction;
public static fun values ()[Lspace/kscience/dataforge/meta/descriptors/ValueRestriction;
}
public final class space/kscience/dataforge/meta/transformations/KeepTransformationRule : space/kscience/dataforge/meta/transformations/TransformationRule {
@ -568,7 +795,10 @@ public final class space/kscience/dataforge/meta/transformations/KeepTransformat
public abstract interface class space/kscience/dataforge/meta/transformations/MetaConverter {
public static final field Companion Lspace/kscience/dataforge/meta/transformations/MetaConverter$Companion;
public abstract fun metaToObject (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
public fun getDescriptor ()Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
public abstract fun getType ()Lkotlin/reflect/KType;
public fun metaToObject (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
public abstract fun metaToObjectOrNull (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
public abstract fun objectToMeta (Ljava/lang/Object;)Lspace/kscience/dataforge/meta/Meta;
}
@ -589,7 +819,7 @@ public final class space/kscience/dataforge/meta/transformations/MetaConverter$C
public final class space/kscience/dataforge/meta/transformations/MetaConverterKt {
public static final fun nullableMetaToObject (Lspace/kscience/dataforge/meta/transformations/MetaConverter;Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
public static final fun nullableObjectToMeta (Lspace/kscience/dataforge/meta/transformations/MetaConverter;Ljava/lang/Object;)Lspace/kscience/dataforge/meta/Meta;
public static final fun valueToObject (Lspace/kscience/dataforge/meta/transformations/MetaConverter;Lspace/kscience/dataforge/values/Value;)Ljava/lang/Object;
public static final fun valueToObject (Lspace/kscience/dataforge/meta/transformations/MetaConverter;Lspace/kscience/dataforge/meta/Value;)Ljava/lang/Object;
}
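For orientation, a hand-rolled converter against the reworked `MetaConverter` contract above: only `type`, `metaToObjectOrNull`, and `objectToMeta` are abstract, while `metaToObject` and `descriptor` now have defaults. A sketch only; `Point` is an illustrative type and the `Meta { ... }` builder is assumed from the core package.

import kotlin.reflect.KType
import kotlin.reflect.typeOf
import space.kscience.dataforge.meta.*
import space.kscience.dataforge.meta.transformations.MetaConverter

data class Point(val x: Double, val y: Double)

object PointConverter : MetaConverter<Point> {
    override val type: KType = typeOf<Point>()

    // Return null instead of throwing when the structure does not match.
    override fun metaToObjectOrNull(meta: Meta): Point? {
        val x = meta["x"]?.value?.double ?: return null
        val y = meta["y"]?.value?.double ?: return null
        return Point(x, y)
    }

    override fun objectToMeta(obj: Point): Meta = Meta {
        set("x", obj.x)
        set("y", obj.y)
    }
}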
public final class space/kscience/dataforge/meta/transformations/MetaTransformation {
@ -673,6 +903,10 @@ public abstract interface annotation class space/kscience/dataforge/misc/DFExper
public abstract interface annotation class space/kscience/dataforge/misc/DFInternal : java/lang/annotation/Annotation {
}
public abstract interface annotation class space/kscience/dataforge/misc/DfId : java/lang/annotation/Annotation {
public abstract fun id ()Ljava/lang/String;
}
public abstract interface class space/kscience/dataforge/misc/Named {
public static final field Companion Lspace/kscience/dataforge/misc/Named$Companion;
public abstract fun getName ()Lspace/kscience/dataforge/names/Name;
@ -686,10 +920,6 @@ public final class space/kscience/dataforge/misc/NamedKt {
public static final fun isAnonymous (Lspace/kscience/dataforge/misc/Named;)Z
}
public abstract interface annotation class space/kscience/dataforge/misc/Type : java/lang/annotation/Annotation {
public abstract fun id ()Ljava/lang/String;
}
public final class space/kscience/dataforge/names/Name {
public static final field Companion Lspace/kscience/dataforge/names/Name$Companion;
public static final field NAME_SEPARATOR Ljava/lang/String;
@ -702,14 +932,13 @@ public final class space/kscience/dataforge/names/Name {
public final class space/kscience/dataforge/names/Name$Companion {
public final fun getEMPTY ()Lspace/kscience/dataforge/names/Name;
public final fun getMATCH_ALL_TOKEN ()Lspace/kscience/dataforge/names/NameToken;
public final fun getMATCH_ANY_TOKEN ()Lspace/kscience/dataforge/names/NameToken;
public final fun of ([Ljava/lang/String;)Lspace/kscience/dataforge/names/Name;
public final fun parse (Ljava/lang/String;)Lspace/kscience/dataforge/names/Name;
public final fun serializer ()Lkotlinx/serialization/KSerializer;
}
public final class space/kscience/dataforge/names/NameKt {
public static final fun appendFirst (Lspace/kscience/dataforge/names/Name;Ljava/lang/String;)Lspace/kscience/dataforge/names/Name;
public static final fun appendLeft (Lspace/kscience/dataforge/names/Name;Ljava/lang/String;)Lspace/kscience/dataforge/names/Name;
public static final fun asName (Ljava/lang/String;)Lspace/kscience/dataforge/names/Name;
public static final fun asName (Lspace/kscience/dataforge/names/NameToken;)Lspace/kscience/dataforge/names/Name;
@ -725,12 +954,17 @@ public final class space/kscience/dataforge/names/NameKt {
public static synthetic fun get$default (Ljava/util/Map;Ljava/lang/String;Ljava/lang/String;ILjava/lang/Object;)Ljava/lang/Object;
public static final fun getLength (Lspace/kscience/dataforge/names/Name;)I
public static final fun isEmpty (Lspace/kscience/dataforge/names/Name;)Z
public static final fun last (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/names/NameToken;
public static final fun lastOrNull (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/names/NameToken;
public static final fun parseAsName (Ljava/lang/String;Z)Lspace/kscience/dataforge/names/Name;
public static synthetic fun parseAsName$default (Ljava/lang/String;ZILjava/lang/Object;)Lspace/kscience/dataforge/names/Name;
public static final fun plus (Lspace/kscience/dataforge/names/Name;Ljava/lang/String;)Lspace/kscience/dataforge/names/Name;
public static final fun plus (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/names/Name;
public static final fun plus (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/names/NameToken;)Lspace/kscience/dataforge/names/Name;
public static final fun plus (Lspace/kscience/dataforge/names/NameToken;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/names/Name;
public static final fun removeFirstOrNull (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/names/Name;
public static final fun removeHeadOrNull (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/names/Name;
public static final fun replaceLast (Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/names/Name;
public static final fun set (Ljava/util/Map;Ljava/lang/String;Ljava/lang/Object;)V
public static final fun startsWith (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/names/Name;)Z
public static final fun startsWith (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/names/NameToken;)Z
@ -754,18 +988,16 @@ public final class space/kscience/dataforge/names/NameToken {
public static final field Companion Lspace/kscience/dataforge/names/NameToken$Companion;
public fun <init> (Ljava/lang/String;Ljava/lang/String;)V
public synthetic fun <init> (Ljava/lang/String;Ljava/lang/String;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
public final fun component1 ()Ljava/lang/String;
public final fun component2 ()Ljava/lang/String;
public final fun copy (Ljava/lang/String;Ljava/lang/String;)Lspace/kscience/dataforge/names/NameToken;
public static synthetic fun copy$default (Lspace/kscience/dataforge/names/NameToken;Ljava/lang/String;Ljava/lang/String;ILjava/lang/Object;)Lspace/kscience/dataforge/names/NameToken;
public fun equals (Ljava/lang/Object;)Z
public final fun getBody ()Ljava/lang/String;
public final fun getIndex ()Ljava/lang/String;
public fun hashCode ()I
public fun toString ()Ljava/lang/String;
public final fun toStringUnescaped ()Ljava/lang/String;
}
public final class space/kscience/dataforge/names/NameToken$Companion {
public final fun parse (Ljava/lang/String;)Lspace/kscience/dataforge/names/NameToken;
public final fun serializer ()Lkotlinx/serialization/KSerializer;
}
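The `Name` and `NameToken` entries above in everyday use; `Name.parse`, `asName`, `plus`, `startsWith`, `lastOrNull`, and `length` are all declared in this dump, while the concrete name strings are illustrative (dots separate tokens, square brackets carry a token index).

import space.kscience.dataforge.names.*

fun nameDemo() {
    // Dot-separated tokens; the bracket part becomes the token index.
    val name: Name = Name.parse("detector.channel[2].voltage")

    val last: NameToken? = name.lastOrNull()   // body = "voltage", index = null
    val root: Name = "detector".asName()

    println(name.startsWith(root))             // true
    println(root + "temperature")              // detector.temperature
    println(name.length)                       // 3
}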
@ -783,210 +1015,3 @@ public final class space/kscience/dataforge/names/NameTokenSerializer : kotlinx/
public fun serialize (Lkotlinx/serialization/encoding/Encoder;Lspace/kscience/dataforge/names/NameToken;)V
}
public final class space/kscience/dataforge/values/DoubleArrayValue : java/lang/Iterable, kotlin/jvm/internal/markers/KMappedMarker, space/kscience/dataforge/values/Value {
public fun <init> ([D)V
public fun equals (Ljava/lang/Object;)Z
public fun getList ()Ljava/util/List;
public fun getType ()Lspace/kscience/dataforge/values/ValueType;
public synthetic fun getValue ()Ljava/lang/Object;
public fun getValue ()[D
public fun hashCode ()I
public fun iterator ()Ljava/util/Iterator;
public fun toString ()Ljava/lang/String;
}
public final class space/kscience/dataforge/values/EnumValue : space/kscience/dataforge/values/Value {
public fun <init> (Ljava/lang/Enum;)V
public fun equals (Ljava/lang/Object;)Z
public fun getType ()Lspace/kscience/dataforge/values/ValueType;
public fun getValue ()Ljava/lang/Enum;
public synthetic fun getValue ()Ljava/lang/Object;
public fun hashCode ()I
public fun toString ()Ljava/lang/String;
}
public final class space/kscience/dataforge/values/ExoticValuesKt {
public static final fun asValue ([D)Lspace/kscience/dataforge/values/Value;
public static final fun lazyParseValue (Ljava/lang/String;)Lspace/kscience/dataforge/values/LazyParsedValue;
}
public final class space/kscience/dataforge/values/False : space/kscience/dataforge/values/Value {
public static final field INSTANCE Lspace/kscience/dataforge/values/False;
public fun equals (Ljava/lang/Object;)Z
public fun getType ()Lspace/kscience/dataforge/values/ValueType;
public fun getValue ()Ljava/lang/Object;
public fun hashCode ()I
public fun toString ()Ljava/lang/String;
}
public final class space/kscience/dataforge/values/LazyParsedValue : space/kscience/dataforge/values/Value {
public fun <init> (Ljava/lang/String;)V
public fun equals (Ljava/lang/Object;)Z
public final fun getString ()Ljava/lang/String;
public fun getType ()Lspace/kscience/dataforge/values/ValueType;
public fun getValue ()Ljava/lang/Object;
public fun hashCode ()I
public fun toString ()Ljava/lang/String;
}
public final class space/kscience/dataforge/values/ListValue : java/lang/Iterable, kotlin/jvm/internal/markers/KMappedMarker, space/kscience/dataforge/values/Value {
public static final field Companion Lspace/kscience/dataforge/values/ListValue$Companion;
public fun <init> (Ljava/util/List;)V
public fun equals (Ljava/lang/Object;)Z
public fun getList ()Ljava/util/List;
public fun getType ()Lspace/kscience/dataforge/values/ValueType;
public synthetic fun getValue ()Ljava/lang/Object;
public fun getValue ()Ljava/util/List;
public fun hashCode ()I
public fun iterator ()Ljava/util/Iterator;
public fun toString ()Ljava/lang/String;
}
public final class space/kscience/dataforge/values/ListValue$Companion {
public final fun getEMPTY ()Lspace/kscience/dataforge/values/ListValue;
}
public abstract interface class space/kscience/dataforge/values/MutableValueProvider : space/kscience/dataforge/values/ValueProvider {
public abstract fun setValue (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/values/Value;)V
}
public final class space/kscience/dataforge/values/Null : space/kscience/dataforge/values/Value {
public static final field INSTANCE Lspace/kscience/dataforge/values/Null;
public fun equals (Ljava/lang/Object;)Z
public fun getType ()Lspace/kscience/dataforge/values/ValueType;
public fun getValue ()Ljava/lang/Object;
public fun hashCode ()I
public fun toString ()Ljava/lang/String;
}
public final class space/kscience/dataforge/values/NumberValue : space/kscience/dataforge/values/Value {
public fun <init> (Ljava/lang/Number;)V
public fun equals (Ljava/lang/Object;)Z
public final fun getNumber ()Ljava/lang/Number;
public fun getType ()Lspace/kscience/dataforge/values/ValueType;
public fun getValue ()Ljava/lang/Object;
public fun hashCode ()I
public fun toString ()Ljava/lang/String;
}
public final class space/kscience/dataforge/values/StringValue : space/kscience/dataforge/values/Value {
public static final synthetic fun box-impl (Ljava/lang/String;)Lspace/kscience/dataforge/values/StringValue;
public static fun constructor-impl (Ljava/lang/String;)Ljava/lang/String;
public fun equals (Ljava/lang/Object;)Z
public static fun equals-impl (Ljava/lang/String;Ljava/lang/Object;)Z
public static final fun equals-impl0 (Ljava/lang/String;Ljava/lang/String;)Z
public final fun getString ()Ljava/lang/String;
public fun getType ()Lspace/kscience/dataforge/values/ValueType;
public static fun getType-impl (Ljava/lang/String;)Lspace/kscience/dataforge/values/ValueType;
public fun getValue ()Ljava/lang/Object;
public static fun getValue-impl (Ljava/lang/String;)Ljava/lang/Object;
public fun hashCode ()I
public static fun hashCode-impl (Ljava/lang/String;)I
public fun toString ()Ljava/lang/String;
public static fun toString-impl (Ljava/lang/String;)Ljava/lang/String;
public final synthetic fun unbox-impl ()Ljava/lang/String;
}
public final class space/kscience/dataforge/values/True : space/kscience/dataforge/values/Value {
public static final field INSTANCE Lspace/kscience/dataforge/values/True;
public fun equals (Ljava/lang/Object;)Z
public fun getType ()Lspace/kscience/dataforge/values/ValueType;
public fun getValue ()Ljava/lang/Object;
public fun hashCode ()I
public fun toString ()Ljava/lang/String;
}
public abstract interface class space/kscience/dataforge/values/Value {
public static final field Companion Lspace/kscience/dataforge/values/Value$Companion;
public static final field TYPE Ljava/lang/String;
public abstract fun equals (Ljava/lang/Object;)Z
public fun getList ()Ljava/util/List;
public abstract fun getType ()Lspace/kscience/dataforge/values/ValueType;
public abstract fun getValue ()Ljava/lang/Object;
public abstract fun hashCode ()I
public abstract fun toString ()Ljava/lang/String;
}
public final class space/kscience/dataforge/values/Value$Companion {
public static final field TYPE Ljava/lang/String;
public final fun of (Ljava/lang/Object;)Lspace/kscience/dataforge/values/Value;
}
public final class space/kscience/dataforge/values/ValueExtensionsKt {
public static final fun getBoolean (Lspace/kscience/dataforge/values/Value;)Z
public static final fun getDouble (Lspace/kscience/dataforge/values/Value;)D
public static final fun getDoubleArray (Lspace/kscience/dataforge/values/Value;)[D
public static final fun getFloat (Lspace/kscience/dataforge/values/Value;)F
public static final fun getInt (Lspace/kscience/dataforge/values/Value;)I
public static final fun getLong (Lspace/kscience/dataforge/values/Value;)J
public static final fun getShort (Lspace/kscience/dataforge/values/Value;)S
public static final fun getStringList (Lspace/kscience/dataforge/values/Value;)Ljava/util/List;
public static final fun isList (Lspace/kscience/dataforge/values/Value;)Z
public static final fun isNull (Lspace/kscience/dataforge/values/Value;)Z
public static final fun toMeta (Lspace/kscience/dataforge/values/Value;)Lspace/kscience/dataforge/meta/Meta;
}
public final class space/kscience/dataforge/values/ValueKt {
public static final fun ListValue ([Ljava/lang/Number;)Lspace/kscience/dataforge/values/ListValue;
public static final fun ListValue ([Ljava/lang/String;)Lspace/kscience/dataforge/values/ListValue;
public static final fun asValue (Ljava/lang/Enum;)Lspace/kscience/dataforge/values/Value;
public static final fun asValue (Ljava/lang/Iterable;)Lspace/kscience/dataforge/values/Value;
public static final fun asValue (Ljava/lang/Number;)Lspace/kscience/dataforge/values/Value;
public static final fun asValue (Ljava/lang/String;)Lspace/kscience/dataforge/values/Value;
public static final fun asValue (Z)Lspace/kscience/dataforge/values/Value;
public static final fun asValue ([B)Lspace/kscience/dataforge/values/Value;
public static final fun asValue ([F)Lspace/kscience/dataforge/values/Value;
public static final fun asValue ([I)Lspace/kscience/dataforge/values/Value;
public static final fun asValue ([J)Lspace/kscience/dataforge/values/Value;
public static final fun asValue ([S)Lspace/kscience/dataforge/values/Value;
public static final fun getNumber (Lspace/kscience/dataforge/values/Value;)Ljava/lang/Number;
public static final fun getNumberOrNull (Lspace/kscience/dataforge/values/Value;)Ljava/lang/Number;
public static final fun getString (Lspace/kscience/dataforge/values/Value;)Ljava/lang/String;
public static final fun parseValue (Ljava/lang/String;)Lspace/kscience/dataforge/values/Value;
}
public abstract interface class space/kscience/dataforge/values/ValueProvider {
public abstract fun getValue (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/values/Value;
}
public final class space/kscience/dataforge/values/ValueProviderKt {
public static final fun getValue (Lspace/kscience/dataforge/values/ValueProvider;Ljava/lang/String;)Lspace/kscience/dataforge/values/Value;
public static final fun setValue (Lspace/kscience/dataforge/values/MutableValueProvider;Ljava/lang/String;Lspace/kscience/dataforge/values/Value;)V
}
public final class space/kscience/dataforge/values/ValueSerializer : kotlinx/serialization/KSerializer {
public static final field INSTANCE Lspace/kscience/dataforge/values/ValueSerializer;
public synthetic fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Ljava/lang/Object;
public fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Lspace/kscience/dataforge/values/Value;
public fun getDescriptor ()Lkotlinx/serialization/descriptors/SerialDescriptor;
public synthetic fun serialize (Lkotlinx/serialization/encoding/Encoder;Ljava/lang/Object;)V
public fun serialize (Lkotlinx/serialization/encoding/Encoder;Lspace/kscience/dataforge/values/Value;)V
}
public final class space/kscience/dataforge/values/ValueType : java/lang/Enum {
public static final field BOOLEAN Lspace/kscience/dataforge/values/ValueType;
public static final field Companion Lspace/kscience/dataforge/values/ValueType$Companion;
public static final field LIST Lspace/kscience/dataforge/values/ValueType;
public static final field NULL Lspace/kscience/dataforge/values/ValueType;
public static final field NUMBER Lspace/kscience/dataforge/values/ValueType;
public static final field STRING Lspace/kscience/dataforge/values/ValueType;
public static fun valueOf (Ljava/lang/String;)Lspace/kscience/dataforge/values/ValueType;
public static fun values ()[Lspace/kscience/dataforge/values/ValueType;
}
public final class space/kscience/dataforge/values/ValueType$$serializer : kotlinx/serialization/internal/GeneratedSerializer {
public static final field INSTANCE Lspace/kscience/dataforge/values/ValueType$$serializer;
public static final synthetic field descriptor Lkotlinx/serialization/descriptors/SerialDescriptor;
public fun childSerializers ()[Lkotlinx/serialization/KSerializer;
public synthetic fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Ljava/lang/Object;
public fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Lspace/kscience/dataforge/values/ValueType;
public fun getDescriptor ()Lkotlinx/serialization/descriptors/SerialDescriptor;
public synthetic fun serialize (Lkotlinx/serialization/encoding/Encoder;Ljava/lang/Object;)V
public fun serialize (Lkotlinx/serialization/encoding/Encoder;Lspace/kscience/dataforge/values/ValueType;)V
public fun typeParametersSerializers ()[Lkotlinx/serialization/KSerializer;
}
public final class space/kscience/dataforge/values/ValueType$Companion {
public final fun serializer ()Lkotlinx/serialization/KSerializer;
}
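The whole `space.kscience.dataforge.values` block above is being removed from the dump because the value API moved into `space.kscience.dataforge.meta` in this release (compare the replaced `values/Value` vs `meta/Value` signatures earlier). A short sketch of the same API under that assumption:

import space.kscience.dataforge.meta.*

fun valueDemo() {
    val number: Value = 42.asValue()            // NumberValue, ValueType.NUMBER
    val flag: Value = true.asValue()            // True singleton, ValueType.BOOLEAN
    val list: Value = listOf(1.0, 2.0, 3.0).map { it.asValue() }.asValue()

    println(number.type == ValueType.NUMBER)    // true
    println(number.double + 1.0)                // 43.0
    println(flag.string)                        // "true"
    println(list.list.size)                     // 3
}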

View File

@ -1,9 +1,11 @@
plugins {
id("ru.mipt.npm.gradle.mpp")
id("ru.mipt.npm.gradle.native")
id("space.kscience.gradle.mpp")
}
kscience {
jvm()
js()
native()
useSerialization{
json()
}
@ -12,5 +14,5 @@ kscience {
description = "Meta definition and basic operations on meta"
readme{
maturity = ru.mipt.npm.gradle.Maturity.DEVELOPMENT
maturity = space.kscience.gradle.Maturity.DEVELOPMENT
}
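Pieced together from the fragments above, the migrated build script for this module looks roughly like the following; this is a reconstruction, since the diff omits unchanged context lines.

// dataforge-meta/build.gradle.kts after switching to the space.kscience plugin
plugins {
    id("space.kscience.gradle.mpp")
}

kscience {
    jvm()
    js()
    native()
    useSerialization {
        json()
    }
}

description = "Meta definition and basic operations on meta"

readme {
    maturity = space.kscience.gradle.Maturity.DEVELOPMENT
}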

View File

@ -6,7 +6,6 @@ import kotlinx.serialization.json.*
import space.kscience.dataforge.meta.descriptors.MetaDescriptor
import space.kscience.dataforge.meta.descriptors.get
import space.kscience.dataforge.names.NameToken
import space.kscience.dataforge.values.*
private const val jsonArrayKey: String = "@jsonArray"
@ -24,7 +23,6 @@ public fun Value.toJson(descriptor: MetaDescriptor? = null): JsonElement = when
}
//Use these methods to customize JSON key mapping
@Suppress("NULLABLE_EXTENSION_OPERATOR_WITH_SAFE_CALL_RECEIVER")
private fun String.toJsonKey(descriptor: MetaDescriptor?) = descriptor?.attributes?.get("jsonName").string ?: toString()
private fun Meta.toJsonWithIndex(descriptor: MetaDescriptor?, index: String?): JsonElement = if (items.isEmpty()) {
@ -36,9 +34,13 @@ private fun Meta.toJsonWithIndex(descriptor: MetaDescriptor?, index: String?): J
val childDescriptor = descriptor?.children?.get(body)
if (list.size == 1) {
val (token, element) = list.first()
//do not add empty element
val child: JsonElement = element.toJsonWithIndex(childDescriptor, token.index)
body to child
//do not add an empty element
val child: JsonElement = element.toJsonWithIndex(childDescriptor, token.index)
if(token.index == null) {
body to child
} else {
body to JsonArray(listOf(child))
}
} else {
val elements: List<JsonElement> = list.sortedBy { it.key.index }.mapIndexed { index, entry ->
//Use index if it is not equal to the item order
@ -62,30 +64,24 @@ private fun Meta.toJsonWithIndex(descriptor: MetaDescriptor?, index: String?): J
JsonObject(pairs.toMap())
}
public fun Meta.toJson(descriptor: MetaDescriptor? = null): JsonObject {
val element = toJsonWithIndex(descriptor, null)
return if (element is JsonObject) {
element
} else {
buildJsonObject {
put("@value", element)
}
}
}
/**
* Convert Meta to [JsonElement]. Meta with children is converted to [JsonObject].
* Meta without children is converted to either [JsonPrimitive] or [JsonArray] depending on the value type.
* An empty Meta is converted to an empty JsonObject.
*/
public fun Meta.toJson(descriptor: MetaDescriptor? = null): JsonElement = toJsonWithIndex(descriptor, null)
/**
* Convert a Json primitive to a [Value]
*/
public fun JsonPrimitive.toValue(descriptor: MetaDescriptor?): Value {
return when (this) {
JsonNull -> Null
else -> {
if (isString) {
StringValue(content)
} else {
//consider using LazyParse
content.parseValue()
}
public fun JsonPrimitive.toValue(descriptor: MetaDescriptor?): Value = when (this) {
JsonNull -> Null
else -> {
if (isString) {
content.asValue()
} else {
//consider using LazyParse
Value.parse(content)
}
}
}
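A small sketch of the `Meta.toJson` direction visible in this hunk; it assumes the `Meta { ... }` builder from the core package, the key names are illustrative, and the printed JSON is approximate.

import kotlinx.serialization.json.JsonElement
import space.kscience.dataforge.meta.*

fun metaToJsonDemo() {
    val meta: Meta = Meta {
        set("name", "detector")
        set("position.x", 1.0)
        set("position.y", 2.5)
    }

    // A Meta with children becomes a JsonObject; nested names become nested JSON objects.
    val json: JsonElement = meta.toJson()
    println(json)  // roughly: {"name":"detector","position":{"x":1.0,"y":2.5}}
}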

View File

@ -2,7 +2,6 @@ package space.kscience.dataforge.meta
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.NameToken
import space.kscience.dataforge.values.Value
/**
* A meta laminate consisting of multiple immutable meta layers. For a mutable front layer, use [Scheme].
@ -18,8 +17,8 @@ public class Laminate internal constructor(public val layers: List<Meta>) : Type
}
}
override fun getMeta(name: Name): Laminate? {
val childLayers = layers.mapNotNull { it.getMeta(name) }
override fun get(name: Name): Laminate? {
val childLayers = layers.mapNotNull { it.get(name) }
return if (childLayers.isEmpty()) null else Laminate(childLayers)
}
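A lookup sketch for `Laminate`: the front layer shadows the ones behind it, and missing keys fall through. It assumes the public `Laminate(vararg layers: Meta?)` factory (the constructor shown here is internal) and the `Meta { ... }` builder; the keys are illustrative.

import space.kscience.dataforge.meta.*

fun laminateDemo() {
    val defaults = Meta {
        set("plot.color", "black")
        set("plot.width", 1)
    }
    val overrides = Meta {
        set("plot.color", "red")
    }

    val laminate = Laminate(overrides, defaults)  // front layer first

    println(laminate["plot.color"].string)        // "red" (taken from the front layer)
    println(laminate["plot.width"].string)        // "1"   (falls back to defaults)
}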

View File

@ -2,10 +2,10 @@ package space.kscience.dataforge.meta
import kotlinx.serialization.Serializable
import kotlinx.serialization.json.Json
import space.kscience.dataforge.misc.Type
import space.kscience.dataforge.misc.DfId
import space.kscience.dataforge.misc.unsafeCast
import space.kscience.dataforge.names.*
import space.kscience.dataforge.values.*
import kotlin.jvm.JvmName
/**
@ -21,9 +21,9 @@ public interface MetaRepr {
* A container for meta nodes
*/
public fun interface MetaProvider : ValueProvider {
public fun getMeta(name: Name): Meta?
public operator fun get(name: Name): Meta?
override fun getValue(name: Name): Value? = getMeta(name)?.value
override fun getValue(name: Name): Value? = get(name)?.value
}
/**
@ -31,13 +31,13 @@ public fun interface MetaProvider : ValueProvider {
* TODO add documentation
* Same name siblings are supported via elements with the same [Name] but different indices.
*/
@Type(Meta.TYPE)
@DfId(Meta.TYPE)
@Serializable(MetaSerializer::class)
public interface Meta : MetaRepr, MetaProvider {
public val value: Value?
public val items: Map<NameToken, Meta>
override fun getMeta(name: Name): Meta? {
override fun get(name: Name): Meta? {
tailrec fun Meta.find(name: Name): Meta? = if (name.isEmpty()) {
this
} else {
@ -50,7 +50,9 @@ public interface Meta : MetaRepr, MetaProvider {
override fun toMeta(): Meta = this
override fun toString(): String
override fun equals(other: Any?): Boolean
override fun hashCode(): Int
public companion object {
@ -95,6 +97,10 @@ public interface Meta : MetaRepr, MetaProvider {
public val Meta.isLeaf: Boolean get() = items.isEmpty()
public operator fun Meta?.get(token: NameToken): Meta? = this?.items?.get(token)
@Deprecated("Use nullable receiver", level = DeprecationLevel.HIDDEN)
@JvmName("getNonNullable")
public operator fun Meta.get(token: NameToken): Meta? = items[token]
/**
@ -102,12 +108,12 @@ public operator fun Meta.get(token: NameToken): Meta? = items[token]
*
* If [name] is empty return current [Meta]
*/
public operator fun Meta.get(name: Name): Meta? = getMeta(name)
public operator fun Meta?.get(name: Name): Meta? = this?.get(name)
/**
* Parse [Name] from [key] using full name notation and pass it to [Meta.get]
*/
public operator fun Meta.get(key: String): Meta? = this[Name.parse(key)]
public operator fun Meta?.get(key: String): Meta? = this?.get(key.parseAsName(true))
/**
* Get all items matching the given name. The index of the last element, if present, is used as a [Regex],
@ -120,7 +126,9 @@ public fun Meta.getIndexed(name: Name): Map<String?, Meta> {
else -> this[name.cutLast()] ?: return emptyMap()
}
val (body, index) = name.lastOrNull()!!
val lastName = name.lastOrNull()!!
val body = lastName.body
val index = lastName.index
return if (index == null) {
root.items
.filter { it.key.body == body }
@ -133,6 +141,7 @@ public fun Meta.getIndexed(name: Name): Map<String?, Meta> {
}
}
public fun Meta.getIndexed(name: String): Map<String?, Meta> = getIndexed(name.parseAsName(true))
/**
* A meta node that ensures that all of its descendants have at least the same type.
@ -142,7 +151,7 @@ public interface TypedMeta<out M : TypedMeta<M>> : Meta {
override val items: Map<NameToken, M>
override fun getMeta(name: Name): M? {
override fun get(name: Name): M? {
tailrec fun M.find(name: Name): M? = if (name.isEmpty()) {
this
} else {
@ -162,23 +171,20 @@ public inline val <M : TypedMeta<M>> TypedMeta<M>.self: M get() = unsafeCast()
//public typealias Meta = TypedMeta<*>
public operator fun <M : TypedMeta<M>> TypedMeta<M>.get(token: NameToken): M? = items[token]
public operator fun <M : TypedMeta<M>> TypedMeta<M>?.get(token: NameToken): M? = this?.items?.get(token)
/**
* Perform recursive item search using given [name]. Each [NameToken] is treated as a name in [TypedMeta.items] of a parent node.
* Retrieves a meta node with the given name from the nullable [TypedMeta] object.
*
* If [name] is empty return current [Meta]
* @param name The name of the meta node to retrieve.
* @return The meta node with the given name, or null if it doesn't exist.
*/
public tailrec operator fun <M : TypedMeta<M>> TypedMeta<M>.get(name: Name): M? = if (name.isEmpty()) {
self
} else {
get(name.firstOrNull()!!)?.get(name.cutFirst())
}
public operator fun <M : TypedMeta<M>> M?.get(name: Name): M? = this?.get(name)
/**
* Parse [Name] from [key] using full name notation and pass it to [TypedMeta.get]
*/
public operator fun <M : TypedMeta<M>> TypedMeta<M>.get(key: String): M? = this[Name.parse(key)]
public operator fun <M : TypedMeta<M>> M?.get(key: String): M? = this?.get(key.parseAsName(true))
/**
@ -221,7 +227,8 @@ public fun Meta.isEmpty(): Boolean = this === Meta.EMPTY
public fun <M : TypedMeta<M>> TypedMeta<M>.getIndexed(name: Name): Map<String?, M> =
(this as Meta).getIndexed(name) as Map<String?, M>
public fun <M : TypedMeta<M>> TypedMeta<M>.getIndexed(name: String): Map<String?, Meta> = getIndexed(Name.parse(name))
public fun <M : TypedMeta<M>> TypedMeta<M>.getIndexed(name: String): Map<String?, Meta> =
getIndexed(name.parseAsName(true))
public val Meta?.string: String? get() = this?.value?.string
@ -246,8 +253,9 @@ public val Meta.stringList: List<String>? get() = value?.list?.map { it.string }
/**
* Create a provider that uses the given provider for default values if those are not found in this provider
*/
public fun Meta.withDefault(default: Meta?): Meta = if (default == null) {
public fun Meta.withDefault(default: MetaProvider?): Meta = if (default == null) {
this
} else {
Laminate(this, default)
//TODO optimize
toMutableMeta().withDefault(default)
}
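The accessors in this file in practice; a sketch assuming the `Meta { ... }` builder, with illustrative keys.

import space.kscience.dataforge.meta.*
import space.kscience.dataforge.names.Name

fun metaAccessDemo() {
    val meta = Meta {
        set("task.name", "fit")
        set(Name.parse("point[0].x"), 1.0)
        set(Name.parse("point[1].x"), 2.0)
    }

    // Nullable-receiver get operators: a missing node simply yields null.
    println(meta["task.name"].string)        // "fit"
    println(meta["task.missing"].string)     // null

    // Same-name siblings are collected by getIndexed, keyed by the token index.
    val points: Map<String?, Meta> = meta.getIndexed("point")
    println(points.keys)                     // [0, 1]

    // withDefault falls back to another MetaProvider for missing items.
    val resolved = meta.withDefault(Meta { set("task.threads", 4) })
    println(resolved["task.threads"].string) // "4"
}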

View File

@ -3,34 +3,33 @@ package space.kscience.dataforge.meta
import space.kscience.dataforge.meta.transformations.MetaConverter
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.asName
import space.kscience.dataforge.values.*
import kotlin.properties.ReadOnlyProperty
/* Meta delegates */
public fun MetaProvider.node(key: Name? = null): ReadOnlyProperty<Any?, Meta?> = ReadOnlyProperty { _, property ->
getMeta(key ?: property.name.asName())
get(key ?: property.name.asName())
}
public fun <T> MetaProvider.node(
key: Name? = null,
converter: MetaConverter<T>
): ReadOnlyProperty<Any?, T?> = ReadOnlyProperty { _, property ->
getMeta(key ?: property.name.asName())?.let { converter.metaToObject(it) }
get(key ?: property.name.asName())?.let { converter.metaToObject(it) }
}
/**
* A property delegate that uses custom key
*/
public fun MetaProvider.value(key: Name? = null): ReadOnlyProperty<Any?, Value?> = ReadOnlyProperty { _, property ->
getMeta(key ?: property.name.asName())?.value
get(key ?: property.name.asName())?.value
}
public fun <R> MetaProvider.value(
key: Name? = null,
reader: (Value?) -> R
): ReadOnlyProperty<Any?, R> = ReadOnlyProperty { _, property ->
reader(getMeta(key ?: property.name.asName())?.value)
reader(get(key ?: property.name.asName())?.value)
}
//TODO add caching for sealed nodes
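A sketch of the read-only delegates above: the property name doubles as the key when no explicit key is passed. `TaskConfig` and its keys are illustrative, and the wildcard import is assumed to bring in the `value`/`node` helpers together with the value extensions.

import space.kscience.dataforge.meta.*
import space.kscience.dataforge.names.Name

class TaskConfig(provider: MetaProvider) {
    // Key defaults to the property name ("title"); value() exposes the raw Value.
    val title: Value? by provider.value()

    // A custom reader converts the raw Value; an explicit key overrides the property name.
    val threads: Int by provider.value(Name.parse("task.threads")) { it?.int ?: 1 }

    // node() exposes a whole subtree as Meta.
    val output: Meta? by provider.node()
}

Instantiating `TaskConfig(meta)` then reads `title`, `task.threads`, and `output` lazily from the provider on each property access.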

View File

@ -2,30 +2,24 @@ package space.kscience.dataforge.meta
import kotlinx.serialization.Serializable
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.misc.ThreadSafe
import space.kscience.dataforge.names.*
import space.kscience.dataforge.values.EnumValue
import space.kscience.dataforge.values.MutableValueProvider
import space.kscience.dataforge.values.Value
import space.kscience.dataforge.values.asValue
import kotlin.js.JsName
import kotlin.jvm.Synchronized
/**
* Mark a meta builder
*/
@DslMarker
public annotation class MetaBuilder
public annotation class MetaBuilderMarker
/**
* A generic interface that gives access to getting and setting meta nodes and values
*/
public interface MutableMetaProvider : MetaProvider, MutableValueProvider {
override fun getMeta(name: Name): MutableMeta?
public fun setMeta(name: Name, node: Meta?)
override fun setValue(name: Name, value: Value?) {
getMeta(name)?.value = value
}
override fun get(name: Name): MutableMeta?
public operator fun set(name: Name, node: Meta?)
override fun setValue(name: Name, value: Value?)
}
/**
@ -33,7 +27,7 @@ public interface MutableMetaProvider : MetaProvider, MutableValueProvider {
* TODO documentation
*/
@Serializable(MutableMetaSerializer::class)
@MetaBuilder
@MetaBuilderMarker
public interface MutableMeta : Meta, MutableMetaProvider {
override val items: Map<NameToken, MutableMeta>
@ -43,7 +37,7 @@ public interface MutableMeta : Meta, MutableMetaProvider {
*/
override var value: Value?
override fun getMeta(name: Name): MutableMeta? {
override fun get(name: Name): MutableMeta? {
tailrec fun MutableMeta.find(name: Name): MutableMeta? = if (name.isEmpty()) {
this
} else {
@ -89,19 +83,19 @@ public interface MutableMeta : Meta, MutableMetaProvider {
}
public infix fun Name.put(meta: Meta) {
setMeta(this, meta)
set(this, meta)
}
public infix fun Name.put(repr: MetaRepr) {
setMeta(this, repr.toMeta())
set(this, repr.toMeta())
}
public infix fun Name.put(mutableMeta: MutableMeta.() -> Unit) {
setMeta(this, Meta(mutableMeta))
public infix fun Name.put(builder: MutableMeta.() -> Unit) {
getOrCreate(this).apply(builder)
}
public infix fun String.put(meta: Meta) {
setMeta(Name.parse(this), meta)
set(Name.parse(this), meta)
}
public infix fun String.put(value: Value?) {
@ -129,7 +123,7 @@ public interface MutableMeta : Meta, MutableMetaProvider {
}
public infix fun String.put(repr: MetaRepr) {
setMeta(Name.parse(this), repr.toMeta())
set(Name.parse(this), repr.toMeta())
}
public infix fun String.putIndexed(iterable: Iterable<Meta>) {
@ -137,68 +131,43 @@ public interface MutableMeta : Meta, MutableMetaProvider {
}
public infix fun String.put(builder: MutableMeta.() -> Unit) {
setMeta(Name.parse(this), MutableMeta(builder))
getOrCreate(parseAsName()).apply(builder)
}
}
/**
* Set or replace node at given [name]
*/
public operator fun MutableMeta.set(name: Name, meta: Meta): Unit = setMeta(name, meta)
/**
* Set or replace value at given [name]
*/
public operator fun MutableMeta.set(name: Name, value: Value?): Unit = setValue(name, value)
public operator fun MutableValueProvider.set(name: Name, value: Value?): Unit = setValue(name, value)
public fun MutableMeta.getOrCreate(key: String): MutableMeta = getOrCreate(Name.parse(key))
public interface MutableTypedMeta<M : MutableTypedMeta<M>> : TypedMeta<M>, MutableMeta {
/**
* Zero-copy attach or replace existing node. Node is used with any additional state, listeners, etc.
* Zero-copy (if possible) attach or replace an existing node. The node is used together with all of its additional state, listeners, etc.
* In some cases it is possible to have the same node as a child of several parents.
*/
@DFExperimental
public fun attach(name: Name, node: M)
override fun getMeta(name: Name): M?
override fun get(name: Name): M?
override fun getOrCreate(name: Name): M
}
public fun <M : MutableTypedMeta<M>> M.getOrCreate(key: String): M = getOrCreate(Name.parse(key))
public fun MutableMetaProvider.remove(name: Name) {
setMeta(name, null)
set(name, null)
}
public fun MutableMetaProvider.remove(key: String) {
setMeta(Name.parse(key), null)
set(Name.parse(key), null)
}
// node setters
public operator fun MutableMetaProvider.set(Key: NameToken, value: Meta): Unit = setMeta(Key.asName(), value)
public operator fun MutableMetaProvider.set(key: String, value: Meta): Unit = setMeta(Name.parse(key), value)
public operator fun MutableMetaProvider.set(Key: NameToken, value: Meta): Unit = set(Key.asName(), value)
public operator fun MutableMetaProvider.set(key: String, value: Meta): Unit = set(Name.parse(key), value)
//value setters
public operator fun MutableMeta.set(name: NameToken, value: Value?): Unit = set(name.asName(), value)
public operator fun MutableMeta.set(key: String, value: Value?): Unit = set(Name.parse(key), value)
public operator fun MutableMeta.set(name: Name, value: String): Unit = set(name, value.asValue())
public operator fun MutableMeta.set(name: NameToken, value: String): Unit = set(name.asName(), value.asValue())
public operator fun MutableMeta.set(key: String, value: String): Unit = set(Name.parse(key), value.asValue())
public operator fun MutableMeta.set(name: Name, value: Boolean): Unit = set(name, value.asValue())
public operator fun MutableMeta.set(name: NameToken, value: Boolean): Unit = set(name.asName(), value.asValue())
public operator fun MutableMeta.set(key: String, value: Boolean): Unit = set(Name.parse(key), value.asValue())
public operator fun MutableMeta.set(name: Name, value: Number): Unit = set(name, value.asValue())
public operator fun MutableMeta.set(name: NameToken, value: Number): Unit = set(name.asName(), value.asValue())
public operator fun MutableMeta.set(key: String, value: Number): Unit = set(Name.parse(key), value.asValue())
public operator fun MutableMeta.set(name: Name, value: List<Value>): Unit = set(name, value.asValue())
public operator fun MutableMeta.set(name: NameToken, value: List<Value>): Unit = set(name.asName(), value.asValue())
public operator fun MutableMeta.set(key: String, value: List<Value>): Unit = set(Name.parse(key), value.asValue())
//public fun MutableMeta.set(key: String, index: String, value: Value?): Unit =
// set(key.toName().withIndex(index), value)
@ -207,7 +176,7 @@ public operator fun MutableMeta.set(key: String, value: List<Value>): Unit = set
/* Same name siblings generation */
public fun MutableMeta.setIndexed(
public fun MutableMetaProvider.setIndexed(
name: Name,
metas: Iterable<Meta>,
indexFactory: (Meta, index: Int) -> String = { _, index -> index.toString() },
@ -221,10 +190,10 @@ public fun MutableMeta.setIndexed(
}
}
public operator fun MutableMeta.set(name: Name, metas: Iterable<Meta>): Unit =
public operator fun MutableMetaProvider.set(name: Name, metas: Iterable<Meta>): Unit =
setIndexed(name, metas)
public operator fun MutableMeta.set(key: String, metas: Iterable<Meta>): Unit =
public operator fun MutableMetaProvider.set(key: String, metas: Iterable<Meta>): Unit =
setIndexed(Name.parse(key), metas)
@ -234,7 +203,7 @@ public operator fun MutableMeta.set(key: String, metas: Iterable<Meta>): Unit =
* * node updates node and replaces anything but node
* * node list updates node list if number of nodes in the list is the same and replaces anything otherwise
*/
public fun MutableMeta.update(meta: Meta) {
public fun MutableMetaProvider.update(meta: Meta) {
meta.valueSequence().forEach { (name, value) ->
set(name, value)
}
@ -261,16 +230,23 @@ public operator fun <M : MutableTypedMeta<M>> MutableTypedMeta<M>.set(name: Name
}
}
private fun ObservableMeta.adoptBy(parent: MutableMetaImpl, key: NameToken) {
if (this === parent) error("Can't attach a node to itself")
onChange(parent) { name ->
parent.invalidate(key + name)
}
}
/**
* A general implementation of mutable [Meta] which implements both [MutableTypedMeta] and [ObservableMeta].
* The implementation uses blocking synchronization on mutation on the JVM.
*/
private class MutableMetaImpl(
value: Value?,
children: Map<NameToken, Meta> = emptyMap()
children: Map<NameToken, Meta> = emptyMap(),
) : AbstractObservableMeta(), ObservableMutableMeta {
override var value = value
@Synchronized set(value) {
@ThreadSafe set(value) {
val oldValue = field
field = value
if (oldValue != value) {
@ -280,24 +256,16 @@ private class MutableMetaImpl(
private val children: LinkedHashMap<NameToken, ObservableMutableMeta> =
LinkedHashMap(children.mapValues { (key, meta) ->
MutableMetaImpl(meta.value, meta.items).apply { adoptBy(this, key) }
MutableMetaImpl(meta.value, meta.items).also { it.adoptBy(this, key) }
})
override val items: Map<NameToken, ObservableMutableMeta> get() = children
private fun ObservableMeta.adoptBy(parent: MutableMetaImpl, key: NameToken) {
onChange(parent) { name ->
parent.invalidate(key + name)
}
}
@DFExperimental
override fun attach(name: Name, node: ObservableMutableMeta) {
when (name.length) {
0 -> error("Can't set a meta with empty name")
1 -> {
replaceItem(name.first(), get(name), node)
}
1 -> replaceItem(name.first(), get(name), node)
else -> get(name.cutLast())?.attach(name.lastOrNull()!!.asName(), node)
}
}
@ -319,11 +287,11 @@ private class MutableMetaImpl(
override fun getOrCreate(name: Name): ObservableMutableMeta =
if (name.isEmpty()) this else get(name) ?: createNode(name)
@Synchronized
@ThreadSafe
private fun replaceItem(
key: NameToken,
oldItem: ObservableMutableMeta?,
newItem: ObservableMutableMeta?
newItem: ObservableMutableMeta?,
) {
if (oldItem != newItem) {
if (newItem == null) {
@ -338,25 +306,42 @@ private class MutableMetaImpl(
}
private fun wrapItem(meta: Meta): MutableMetaImpl =
MutableMetaImpl(meta.value, meta.items.mapValuesTo(LinkedHashMap()) { wrapItem(it.value) })
meta as? MutableMetaImpl ?: MutableMetaImpl(
meta.value,
meta.items.mapValuesTo(LinkedHashMap()) {
wrapItem(it.value)
}
)
override fun setMeta(name: Name, node: Meta?) {
@ThreadSafe
override fun set(name: Name, node: Meta?) {
val oldItem: ObservableMutableMeta? = get(name)
if (oldItem != node) {
when (name.length) {
0 -> error("Can't set a meta with empty name")
1 -> {
val token = name.firstOrNull()!!
replaceItem(token, oldItem, node?.let { wrapItem(node) })
//remove child and invalidate if argument is null
if (node == null) {
children.remove(token)?.removeListener(this)
// old item is not null otherwise we can't be here
invalidate(name)
} else {
val newNode = wrapItem(node)
newNode.adoptBy(this, token)
children[token] = newNode
}
}
else -> {
val token = name.firstOrNull()!!
//get existing or create new node. Index is ignored for new node
//get existing or create new node.
if (items[token] == null) {
replaceItem(token, null, MutableMetaImpl(null))
val newNode = MutableMetaImpl(null)
newNode.adoptBy(this, token)
children[token] = newNode
}
items[token]?.setMeta(name.cutFirst(), node)
items[token]?.set(name.cutFirst(), node)
}
}
invalidate(name)
@ -384,19 +369,6 @@ public fun MutableMeta.append(name: Name, value: Value): Unit = append(name, Met
public fun MutableMeta.append(key: String, value: Value): Unit = append(Name.parse(key), value)
///**
// * Apply existing node with given [builder] or create a new element with it.
// */
//@DFExperimental
//public fun MutableMeta.edit(name: Name, builder: MutableMeta.() -> Unit) {
// val item = when (val existingItem = get(name)) {
// null -> MutableMeta().also { set(name, it) }
// is MetaItemNode<MutableMeta> -> existingItem.node
// else -> error("Can't edit value meta item")
// }
// item.apply(builder)
//}
/**
* Create a mutable copy of this meta. The copy is created even if the Meta is already mutable
*/
@ -404,16 +376,14 @@ public fun Meta.toMutableMeta(): ObservableMutableMeta = MutableMetaImpl(value,
public fun Meta.asMutableMeta(): MutableMeta = (this as? MutableMeta) ?: toMutableMeta()
@Suppress("FunctionName")
@JsName("newMutableMeta")
public fun MutableMeta(): ObservableMutableMeta = MutableMetaImpl(null)
@JsName("newObservableMutableMeta")
public fun ObservableMutableMeta(): ObservableMutableMeta = MutableMetaImpl(null)
/**
* Build a [MutableMeta] using given transformation
*/
@Suppress("FunctionName")
public inline fun MutableMeta(builder: MutableMeta.() -> Unit = {}): ObservableMutableMeta =
MutableMeta().apply(builder)
public inline fun ObservableMutableMeta(builder: MutableMeta.() -> Unit = {}): ObservableMutableMeta =
ObservableMutableMeta().apply(builder)
/**
@ -425,7 +395,7 @@ public inline fun Meta.copy(block: MutableMeta.() -> Unit = {}): Meta =
private class MutableMetaWithDefault(
val source: MutableMeta, val default: Meta, val rootName: Name
val source: MutableMeta, val default: MetaProvider, val rootName: Name,
) : MutableMeta by source {
override val items: Map<NameToken, MutableMeta>
get() {
@ -438,12 +408,12 @@ private class MutableMetaWithDefault(
}
override var value: Value?
get() = source[rootName]?.value ?: default[rootName]?.value
get() = source[rootName]?.value ?: default.get(rootName)?.value
set(value) {
source[rootName] = value
}
override fun getMeta(name: Name): MutableMeta = MutableMetaWithDefault(source, default, rootName + name)
override fun get(name: Name): MutableMeta = MutableMetaWithDefault(source, default, rootName + name)
override fun toString(): String = Meta.toString(this)
override fun equals(other: Any?): Boolean = Meta.equals(this, other as? Meta)
@ -454,7 +424,6 @@ private class MutableMetaWithDefault(
* Create a mutable item provider that uses the given provider for default values if those are not found in this provider.
* Changes are propagated only to this provider.
*/
public fun MutableMeta.withDefault(default: Meta?): MutableMeta = if (default == null || default.isEmpty()) {
//Optimize for use with empty default
public fun MutableMeta.withDefault(default: MetaProvider?): MutableMeta = if (default == null) {
this
} else MutableMetaWithDefault(this, default, Name.EMPTY)
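A usage sketch for the mutable API after the `getMeta`/`setMeta` to `get`/`set` rename, using the `ObservableMutableMeta { ... }` factory introduced above; the `Meta { ... }` builder is assumed and all keys are illustrative.

import space.kscience.dataforge.meta.*
import space.kscience.dataforge.names.Name

fun mutableMetaDemo() {
    val meta: ObservableMutableMeta = ObservableMutableMeta {
        "device" put {
            set("name", "detector-1")
            set("voltage", 1500)
        }
    }

    // Value and node setters via the renamed `set` operators.
    meta["device.enabled"] = true
    meta[Name.parse("device.voltage")] = 1600

    // Same-name siblings in one call.
    meta.setIndexed(Name.parse("channel"), listOf(Meta { set("id", 0) }, Meta { set("id", 1) }))

    // Removing a node is now `set(name, null)` under the hood.
    meta.remove("device.voltage")

    println(meta["device.name"].string)   // "detector-1"
}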

View File

@ -3,7 +3,6 @@ package space.kscience.dataforge.meta
import space.kscience.dataforge.meta.transformations.MetaConverter
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.asName
import space.kscience.dataforge.values.*
import kotlin.properties.ReadWriteProperty
import kotlin.reflect.KProperty
@ -12,31 +11,31 @@ import kotlin.reflect.KProperty
public fun MutableMetaProvider.node(key: Name? = null): ReadWriteProperty<Any?, Meta?> =
object : ReadWriteProperty<Any?, Meta?> {
override fun getValue(thisRef: Any?, property: KProperty<*>): Meta? {
return getMeta(key ?: property.name.asName())
return get(key ?: property.name.asName())
}
override fun setValue(thisRef: Any?, property: KProperty<*>, value: Meta?) {
val name = key ?: property.name.asName()
setMeta(name, value)
set(name, value)
}
}
public fun <T> MutableMetaProvider.node(key: Name? = null, converter: MetaConverter<T>): ReadWriteProperty<Any?, T?> =
object : ReadWriteProperty<Any?, T?> {
override fun getValue(thisRef: Any?, property: KProperty<*>): T? {
return getMeta(key ?: property.name.asName())?.let { converter.metaToObject(it) }
return get(key ?: property.name.asName())?.let { converter.metaToObject(it) }
}
override fun setValue(thisRef: Any?, property: KProperty<*>, value: T?) {
val name = key ?: property.name.asName()
setMeta(name, value?.let { converter.objectToMeta(it) })
set(name, value?.let { converter.objectToMeta(it) })
}
}
public fun MutableMetaProvider.value(key: Name? = null): ReadWriteProperty<Any?, Value?> =
object : ReadWriteProperty<Any?, Value?> {
override fun getValue(thisRef: Any?, property: KProperty<*>): Value? =
getMeta(key ?: property.name.asName())?.value
get(key ?: property.name.asName())?.value
override fun setValue(thisRef: Any?, property: KProperty<*>, value: Value?) {
setValue(key ?: property.name.asName(), value)
@ -49,7 +48,7 @@ public fun <T> MutableMetaProvider.value(
reader: (Value?) -> T
): ReadWriteProperty<Any?, T> = object : ReadWriteProperty<Any?, T> {
override fun getValue(thisRef: Any?, property: KProperty<*>): T =
reader(getMeta(key ?: property.name.asName())?.value)
reader(get(key ?: property.name.asName())?.value)
override fun setValue(thisRef: Any?, property: KProperty<*>, value: T) {
setValue(key ?: property.name.asName(), writer(value))

View File

@ -1,7 +1,7 @@
package space.kscience.dataforge.meta
import space.kscience.dataforge.misc.ThreadSafe
import space.kscience.dataforge.names.*
import kotlin.jvm.Synchronized
import kotlin.reflect.KProperty1
@ -36,7 +36,7 @@ public interface ObservableMeta : Meta {
public interface ObservableMutableMeta : ObservableMeta, MutableMeta, MutableTypedMeta<ObservableMutableMeta> {
override fun getOrCreate(name: Name): ObservableMutableMeta
override fun getMeta(name: Name): ObservableMutableMeta? {
override fun get(name: Name): ObservableMutableMeta? {
tailrec fun ObservableMutableMeta.find(name: Name): ObservableMutableMeta? = if (name.isEmpty()) {
this
} else {
@ -48,18 +48,18 @@ public interface ObservableMutableMeta : ObservableMeta, MutableMeta, MutableTyp
}
internal abstract class AbstractObservableMeta : ObservableMeta {
private val listeners = HashSet<MetaListener>()
private val listeners: MutableList<MetaListener> = mutableListOf()
override fun invalidate(name: Name) {
listeners.forEach { it.callback(this, name) }
}
@Synchronized
@ThreadSafe
override fun onChange(owner: Any?, callback: Meta.(name: Name) -> Unit) {
listeners.add(MetaListener(owner, callback))
}
@Synchronized
@ThreadSafe
override fun removeListener(owner: Any?) {
listeners.removeAll { it.owner === owner }
}

View File

@ -1,9 +1,8 @@
package space.kscience.dataforge.meta
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.misc.ThreadSafe
import space.kscience.dataforge.names.*
import space.kscience.dataforge.values.Value
import kotlin.jvm.Synchronized
/**
* A class that takes a [MutableMeta] provider and adds observability on top of it
@ -11,22 +10,22 @@ import kotlin.jvm.Synchronized
private class ObservableMetaWrapper(
val root: MutableMeta,
val absoluteName: Name,
val listeners: MutableSet<MetaListener>
val listeners: MutableSet<MetaListener>,
) : ObservableMutableMeta {
override val items: Map<NameToken, ObservableMutableMeta>
get() = root.items.mapValues {
ObservableMetaWrapper(root, absoluteName + it.key, listeners)
get() = root.items.keys.associateWith {
ObservableMetaWrapper(root, absoluteName + it, listeners)
}
override fun getMeta(name: Name): ObservableMutableMeta? =
root.getMeta(name)?.let { ObservableMetaWrapper(root, this.absoluteName + name, listeners) }
override fun get(name: Name): ObservableMutableMeta? =
root.get(name)?.let { ObservableMetaWrapper(root, this.absoluteName + name, listeners) }
@Synchronized
@ThreadSafe
override fun onChange(owner: Any?, callback: Meta.(name: Name) -> Unit) {
listeners.add(
MetaListener(Pair(owner, absoluteName)) { name ->
if (name.startsWith(absoluteName)) {
(this[absoluteName] ?: Meta.EMPTY).callback(name.removeHeadOrNull(absoluteName)!!)
(this[absoluteName] ?: Meta.EMPTY).callback(name.removeFirstOrNull(absoluteName)!!)
}
}
)
@ -50,9 +49,11 @@ private class ObservableMetaWrapper(
override fun getOrCreate(name: Name): ObservableMutableMeta =
ObservableMetaWrapper(root, this.absoluteName + name, listeners)
override fun setMeta(name: Name, node: Meta?) {
override fun set(name: Name, node: Meta?) {
val oldMeta = get(name)
root.setMeta(absoluteName + name, node)
//don't forget to remove listener
oldMeta?.removeListener(this)
root.set(absoluteName + name, node)
if (oldMeta != node) {
invalidate(name)
}
@ -68,7 +69,7 @@ private class ObservableMetaWrapper(
override fun attach(name: Name, node: ObservableMutableMeta) {
set(name, node)
node.onChange(this) { changeName ->
setMeta(name + changeName, node[changeName])
set(name + changeName, this[changeName])
}
}
}

Some files were not shown because too many files have changed in this diff.