diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 6dd1a0ec00..bf8332107a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -93,7 +93,7 @@ jobs: if: matrix.os == 'macos-latest' uses: actions/cache/save@v4 with: - path: dist/darwin_amd64 + path: dist/darwin_arm64 key: darwin-${{ env.sha_short }} - name: Save cache on Windows @@ -146,7 +146,7 @@ jobs: id: restore-macos uses: actions/cache/restore@v4 with: - path: dist/darwin_amd64 + path: dist/darwin_arm64 key: darwin-${{ env.sha_short }} fail-on-cache-miss: true diff --git a/.github/workflows/test-and-upload-coverage.yml b/.github/workflows/test-and-upload-coverage.yml index 60858b1f86..491b674906 100644 --- a/.github/workflows/test-and-upload-coverage.yml +++ b/.github/workflows/test-and-upload-coverage.yml @@ -46,11 +46,13 @@ jobs: database-type: badger-memory mutation-type: collection-save detect-changes: false - - os: windows-latest - client-type: go - database-type: badger-memory - mutation-type: collection-save - detect-changes: false +## TODO: https://github.com/sourcenetwork/defradb/issues/2080 +## Uncomment the lines below to Re-enable the windows build once this todo is resolved. +## - os: windows-latest +## client-type: go +## database-type: badger-memory +## mutation-type: collection-save +## detect-changes: false runs-on: ${{ matrix.os }} diff --git a/CHANGELOG.md b/CHANGELOG.md index 342cfb3a53..7345a58cc8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,70 @@ + + +## [v0.11.0](https://github.com/sourcenetwork/defradb/compare/v0.10.0...v0.11.0) + +> 2024-05-03 + +DefraDB v0.11 is a major pre-production release. Until the stable version 1.0 is reached, the SemVer minor patch number will denote notable releases, which will give the project freedom to experiment and explore potentially breaking changes. + +To get a full outline of the changes, we invite you to review the official changelog below. This release does include a Breaking Change to existing v0.10.x databases. If you need help migrating an existing deployment, reach out at [hello@source.network](mailto:hello@source.network) or join our Discord at https://discord.gg/w7jYQVJ/. + +### Features + +* Update corelog to 0.0.7 ([#2547](https://github.com/sourcenetwork/defradb/issues/2547)) +* Move relation field properties onto collection ([#2529](https://github.com/sourcenetwork/defradb/issues/2529)) +* Lens runtime config ([#2497](https://github.com/sourcenetwork/defradb/issues/2497)) +* Add P Counter CRDT ([#2482](https://github.com/sourcenetwork/defradb/issues/2482)) +* Add Access Control Policy ([#2338](https://github.com/sourcenetwork/defradb/issues/2338)) +* Force explicit primary decl. 
in SDL for one-ones ([#2462](https://github.com/sourcenetwork/defradb/issues/2462)) +* Allow mutation of col sources via PatchCollection ([#2424](https://github.com/sourcenetwork/defradb/issues/2424)) +* Add Defra-Lens support for branching schema ([#2421](https://github.com/sourcenetwork/defradb/issues/2421)) +* Add PatchCollection ([#2402](https://github.com/sourcenetwork/defradb/issues/2402)) + +### Fixes + +* Return correct results from one-many indexed filter ([#2579](https://github.com/sourcenetwork/defradb/issues/2579)) +* Handle compound filters on related indexed fields ([#2575](https://github.com/sourcenetwork/defradb/issues/2575)) +* Add check to filter result for logical ops ([#2573](https://github.com/sourcenetwork/defradb/issues/2573)) +* Make all array kinds nillable ([#2534](https://github.com/sourcenetwork/defradb/issues/2534)) +* Allow update when updating non-indexed field ([#2511](https://github.com/sourcenetwork/defradb/issues/2511)) + +### Documentation + +* Add data definition document ([#2544](https://github.com/sourcenetwork/defradb/issues/2544)) + +### Refactoring + +* Merge collection UpdateWith and DeleteWith ([#2531](https://github.com/sourcenetwork/defradb/issues/2531)) +* DB transactions context ([#2513](https://github.com/sourcenetwork/defradb/issues/2513)) +* Add NormalValue ([#2404](https://github.com/sourcenetwork/defradb/issues/2404)) +* Clean up client/request package ([#2443](https://github.com/sourcenetwork/defradb/issues/2443)) +* Rewrite convertImmutable ([#2445](https://github.com/sourcenetwork/defradb/issues/2445)) +* Unify Field Kind and Schema properties ([#2414](https://github.com/sourcenetwork/defradb/issues/2414)) +* Replace logging package with corelog ([#2406](https://github.com/sourcenetwork/defradb/issues/2406)) + +### Testing + +* Add flag to skip network tests ([#2495](https://github.com/sourcenetwork/defradb/issues/2495)) + +### Bot + +* Update dependencies (bulk dependabot PRs) 30-04-2024 ([#2570](https://github.com/sourcenetwork/defradb/issues/2570)) +* Bump [@typescript](https://github.com/typescript)-eslint/parser from 7.7.0 to 7.7.1 in /playground ([#2550](https://github.com/sourcenetwork/defradb/issues/2550)) +* Bump [@typescript](https://github.com/typescript)-eslint/eslint-plugin from 7.7.0 to 7.7.1 in /playground ([#2551](https://github.com/sourcenetwork/defradb/issues/2551)) +* Bump swagger-ui-react from 5.16.2 to 5.17.0 in /playground ([#2549](https://github.com/sourcenetwork/defradb/issues/2549)) +* Update dependencies (bulk dependabot PRs) 23-04-2023 ([#2548](https://github.com/sourcenetwork/defradb/issues/2548)) +* Bump go.opentelemetry.io/otel/sdk/metric from 1.24.0 to 1.25.0 ([#2499](https://github.com/sourcenetwork/defradb/issues/2499)) +* Bump typescript from 5.4.3 to 5.4.5 in /playground ([#2515](https://github.com/sourcenetwork/defradb/issues/2515)) +* Bump swagger-ui-react from 5.14.0 to 5.15.0 in /playground ([#2514](https://github.com/sourcenetwork/defradb/issues/2514)) +* Update dependencies (bulk dependabot PRs) 2024-04-09 ([#2509](https://github.com/sourcenetwork/defradb/issues/2509)) +* Update dependencies (bulk dependabot PRs) 2024-04-03 ([#2492](https://github.com/sourcenetwork/defradb/issues/2492)) +* Update dependencies (bulk dependabot PRs) 03-04-2024 ([#2486](https://github.com/sourcenetwork/defradb/issues/2486)) +* Bump github.com/multiformats/go-multiaddr from 0.12.2 to 0.12.3 ([#2480](https://github.com/sourcenetwork/defradb/issues/2480)) +* Bump [@types](https://github.com/types)/react from 18.2.66 
to 18.2.67 in /playground ([#2427](https://github.com/sourcenetwork/defradb/issues/2427)) +* Bump [@typescript](https://github.com/typescript)-eslint/parser from 7.2.0 to 7.3.1 in /playground ([#2428](https://github.com/sourcenetwork/defradb/issues/2428)) +* Update dependencies (bulk dependabot PRs) 19-03-2024 ([#2426](https://github.com/sourcenetwork/defradb/issues/2426)) +* Update dependencies (bulk dependabot PRs) 03-11-2024 ([#2399](https://github.com/sourcenetwork/defradb/issues/2399)) + ## [v0.10.0](https://github.com/sourcenetwork/defradb/compare/v0.9.0...v0.10.0) diff --git a/Makefile b/Makefile index cde535be4b..658b514a4b 100644 --- a/Makefile +++ b/Makefile @@ -202,7 +202,7 @@ verify: .PHONY: tidy tidy: - go mod tidy -go=1.21 + go mod tidy -go=1.21.3 .PHONY: clean clean: diff --git a/README.md b/README.md index a7156888b9..220c48f842 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,6 @@ Read the documentation on [docs.source.network](https://docs.source.network/). ## Table of Contents -- [Early Access](#early-access) - [Install](#install) - [Start](#start) - [Configuration](#configuration) @@ -32,12 +31,14 @@ Read the documentation on [docs.source.network](https://docs.source.network/). - [Collection subscription example](#collection-subscription-example) - [Replicator example](#replicator-example) - [Securing the HTTP API with TLS](#securing-the-http-api-with-tls) +- [Access Control System](#access-control-system) - [Supporting CORS](#supporting-cors) - [Backing up and restoring](#backing-up-and-restoring) +- [Community](#community) - [Licensing](#licensing) - [Contributors](#contributors) -DISCLAIMER: At this early stage, DefraDB does not offer access control or data encryption, and the default configuration exposes the database to the network. The software is provided "as is" and is not guaranteed to be stable, secure, or error-free. We encourage you to experiment with DefraDB and provide feedback, but please do not use it for production purposes until it has been thoroughly tested and developed. +DISCLAIMER: At this early stage, DefraDB does not offer data encryption, and the default configuration exposes the database to the network. The software is provided "as is" and is not guaranteed to be stable, secure, or error-free. We encourage you to experiment with DefraDB and provide feedback, but please do not use it for production purposes until it has been thoroughly tested and developed. ## Install @@ -397,6 +398,9 @@ defradb start --tls --pubkeypath ~/path-to-pubkey.key --privkeypath ~/path-to-pr ``` +## Access Control System +Read more about the access control [here](./acp/README.md). + ## Supporting CORS When accessing DefraDB through a frontend interface, you may be confronted with a CORS error. That is because, by default, DefraDB will not have any allowed origins set. To specify which origins should be allowed to access your DefraDB endpoint, you can specify them when starting the database: diff --git a/acp/README.md b/acp/README.md new file mode 100644 index 0000000000..697a60a0c2 --- /dev/null +++ b/acp/README.md @@ -0,0 +1,442 @@ +# Introduction + +In the realm of information technology (IT) and cybersecurity, **access control** plays a pivotal role in ensuring the confidentiality, integrity, and availability of sensitive resources. Let's delve into why access control policies are crucial for protecting your valuable data. + +## What Is Access Control? 
+
+**Access control** is a mechanism that regulates who or what can view, use, or access a specific resource within a computing environment. Its primary goal is to minimize security risks by ensuring that only **authorized users**, systems, or services have access to the resources they need. But it is more than just granting or denying access; it involves several key components:
+
+1. **Authentication**: Verifying the identity of an individual or system.
+2. **Authorization**: Determining what actions or operations an actor is allowed to perform.
+3. **Access**: Granting or denying access based on authorization.
+4. **Management**: Administering access rights and permissions.
+5. **Audit**: Tracking and monitoring access patterns for accountability.
+
+## Why Is Access Control Important?
+
+1. **Mitigating Security Risks**: Cybercriminals are becoming increasingly sophisticated, employing advanced techniques to breach security systems. By controlling who has access to your database, you significantly reduce the risk of unauthorized access, both from external attackers and insider threats.
+
+2. **Compliance with Regulations**: Various regulatory requirements, such as the **General Data Protection Regulation (GDPR)** and the **Health Insurance Portability and Accountability Act (HIPAA)**, mandate stringent access control measures to protect personal data. Implementing access control ensures compliance with these regulations.
+
+3. **Preventing Data Breaches**: Access control acts as a proactive measure to deter, detect, and prevent unauthorized access. It ensures that only those with the necessary permissions can access sensitive data or services.
+
+4. **Managing Complexity**: Modern IT infrastructure, including cloud computing and mobile devices, has exponentially increased the number of access points. Technologies like **identity and access management (IAM)** and approaches like **zero trust** help manage this complexity effectively.
+
+## Types of Security Access Controls
+
+Several access control models exist, including:
+
+- **Role-Based Access Control (RBAC)**: Assigns permissions to roles, which are then granted to users; a user's active role defines their access (e.g., admin, user, manager).
+- **Attribute-Based Access Control (ABAC)**: Considers various attributes (e.g., user attributes, resource attributes) for access decisions.
+- **Discretionary Access Control (DAC)**: Users with sufficient permissions (resource owners) are able to grant or share an object with other users.
+- **Mandatory Access Control (MAC)**: Users are not allowed to grant access to other users. Permissions are granted based on a minimum role or hierarchy (security labels and clearances) that must be met.
+- **Policy-Based Access Control (PBAC)**: Enforces access based on defined policies.
+- **Relation-Based Access Control (ReBAC)**: Relations between objects and users in the system are used to derive their permissions.
+
+- Note: **DefraDB**'s access control rules strongly resemble **Discretionary Access Control (DAC)**, implemented through a **Relation-Based Access Control (ReBAC)** engine (see the sketch below).
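+
+To make the ReBAC model above concrete, here is a minimal, self-contained sketch of relationship tuples plus a check that derives permissions from them. It is illustrative only (all names are made up) and is not DefraDB's implementation, which delegates this work to SourceHub:
+
+```go
+package main
+
+import "fmt"
+
+// Tuple records "subject has relation on object",
+// e.g. "bob" is a "reader" of "doc1".
+type Tuple struct{ Subject, Relation, Object string }
+
+// permissions maps a permission name to the relations that grant it,
+// mirroring exprs like `read: owner + reader` in the policies shown later.
+var permissions = map[string][]string{
+	"read":  {"owner", "reader"},
+	"write": {"owner"},
+}
+
+var tuples = []Tuple{
+	{"alice", "owner", "doc1"},
+	{"bob", "reader", "doc1"},
+}
+
+// hasRelation is a direct lookup; a real engine (e.g. Zanzibar) performs
+// a graph search so that indirect relationships also grant access.
+func hasRelation(subject, relation, object string) bool {
+	for _, t := range tuples {
+		if t == (Tuple{subject, relation, object}) {
+			return true
+		}
+	}
+	return false
+}
+
+// check answers "can subject perform permission on object?".
+func check(subject, permission, object string) bool {
+	for _, relation := range permissions[permission] {
+		if hasRelation(subject, relation, object) {
+			return true
+		}
+	}
+	return false
+}
+
+func main() {
+	fmt.Println(check("bob", "read", "doc1"))  // true: reader grants read
+	fmt.Println(check("bob", "write", "doc1")) // false: only owner can write
+}
+```
+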
+## Challenges of Access Control in Cybersecurity
+
+- **Distributed IT Environments**: Cloud computing and remote work create new challenges.
+- **Rise of Mobility**: Mobile devices in the workplace add complexity.
+- **Password Fatigue**: Balancing security with usability.
+- **Data Governance**: Ensuring visibility and control.
+- **Multi-Tenancy**: Managing complex permissions in SaaS applications.
+
+## Key takeaway
+A robust access control policy system is your first line of defense against unauthorized access and data breaches.
+
+
+# DefraDB's Access Control System
+
+## ReBAC Authorization Model
+
+### Zanzibar
+In 2019, Google published their [Zanzibar](https://research.google/pubs/zanzibar-googles-consistent-global-authorization-system/) paper, which explains how they handle authorization across their many services. It uses access control lists, but with relationship-based rather than role-based access control. Relationship-Based Access Control (ReBAC) establishes an authorization model where a subject's permission to access an object is defined by the presence of relationships between those subjects and objects.
+Zanzibar exposes an API with (mainly) operations to manage `Relationships` (`tuples`) and to verify access requests (can Bob do X?) through the `Check` call. A `tuple` includes a subject, a relation, and an object. The `Check` call performs a graph search over the `tuples` to find a path between the user and the object; if such a path exists, then according to ReBAC the user has the queried permission. Zanzibar operates as a consistent and partition-tolerant system.
+
+### Zanzi
+The Zanzibar API, however, is centralized, so we (Source Network) created **Zanzi**, a decentralized implementation of Zanzibar powered by our SourceHub trust protocol. Zanzi is a general-purpose Zanzibar implementation which operates over a KV persistence layer.
+
+### SourceHub ACP Module
+DefraDB wraps the `local` and `remote` SourceHub ACP Modules to bring all that magic to DefraDB.
+
+In order to set up relation-based access control, SourceHub requires an agreed-upon contract that models the `relations`, `permissions`, and `actors`. That contract is referred to as a `SourceHub Policy`. The policy models all the `relations` and `permissions` under a `resource`.
+A `resource` corresponds to the "thing" we want to gate access control around. This can be a `Type`, `Container`, `Schema`, `Shape`, or anything that has objects needing access control. Once the policy is finalized, it has to be uploaded to the `SourceHub Module` before it can be used.
+Once the `Policy` is uploaded to the `SourceHub Module`, an `Actor` can begin registering `Objects` for access control by linking them to a `Resource` that exists on the uploaded `Policy`.
+After an `Object` is registered successfully, the `Actor` gets a special built-in relation with that `Object` called the `"owner"` relation. This relation is given to the `Registerer` of an `Object`.
+An `Actor` can then issue `Check` calls to see if they have access to an `Object`.
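+
+To make this lifecycle concrete, here is a minimal sketch that mirrors the package's own tests (`acp/acp_local_test.go`) using the local ACP implementation added in this change. The policy is the DPI-compliant one used by those tests; the docID, output, and error handling are illustrative:
+
+```go
+package main
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/sourcenetwork/defradb/acp"
+)
+
+// A DPI-compliant policy: "owner" leads every required permission expression.
+const policy = `
+description: a policy
+
+actor:
+  name: actor
+
+resources:
+  users:
+    permissions:
+      write:
+        expr: owner
+      read:
+        expr: owner + reader
+
+    relations:
+      owner:
+        types:
+          - actor
+      reader:
+        types:
+          - actor
+`
+
+func main() {
+	ctx := context.Background()
+	var local acp.ACPLocal
+
+	local.Init(ctx, "") // An empty path means an in-memory, non-persistent store.
+	if err := local.Start(ctx); err != nil {
+		panic(err)
+	}
+	defer local.Close()
+
+	creator := "cosmos1zzg43wdrhmmk89z3pmejwete2kkd4a3vn7w969"
+
+	// Upload the policy; a policyID is returned upon success.
+	policyID, err := local.AddPolicy(ctx, creator, policy)
+	if err != nil {
+		panic(err)
+	}
+
+	// Register an object; the registering actor becomes its "owner".
+	if err := local.RegisterDocObject(ctx, creator, policyID, "users", "doc-1"); err != nil {
+		panic(err)
+	}
+
+	// Check: the owner has read access to the registered object.
+	canRead, err := local.CheckDocAccess(ctx, acp.ReadPermission, creator, policyID, "users", "doc-1")
+	if err != nil {
+		panic(err)
+	}
+	fmt.Println("owner can read:", canRead) // true
+}
+```
+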
+## Document Access Control (DAC)
+In DefraDB's case, we wanted to gate access control around the `Documents` that belong to a specific `Collection`. Here, the `Collection` (i.e. the type/shape of the `Object`) can be thought of as the `Resource`, and the `Documents` are the `Objects`.
+
+
+## Field Access Control (FAC) (coming soon)
+We also want the ability to do more granular access control than DAC alone. Therefore, we have `Field`-level access control for situations where some fields of a `Document` need to be private while others do not. In this case the `Document` becomes the `Resource` and the `Fields` are the `Objects` being gated.
+
+
+## Admin Access Control (AAC) (coming soon)
+We also want to model access control around the `Admin Level Operations` that exist in `DefraDB`. In this case the entire `Database` would be the `Resource` and the `Admin Level Operations` are the `Objects` being gated.
+
+A non-exhaustive list of operations only admins should have access to:
+- Ability to turn off ACP
+- Ability to interact with the P2P system
+
+## SourceHub Policies Are Too Flexible
+SourceHub Policies are too flexible (at least until the ability to define `Meta Policies` is implemented), because SourceHub leaves it up to the user to specify any type of `Permissions` and `Relations`. For DefraDB, however, certain guarantees **MUST** be maintained for a `Policy` to be effective. For example, a user can input any name for a `Permission` or `Relation` that DefraDB has no knowledge of. Another example is a user writing a `Policy` that grants no `Permission` to the `owner`, which in the case of DAC means no one would have any access to the `Document` they created.
+Therefore, there was a clear need to define some rules for writing a `Resource` in a `Policy` that will be used with DefraDB's DAC, FAC, or AAC. These rules guarantee that certain `Required Permissions` will always be present on a `Resource` and that the `Owner` has the correct `Permissions`.
+
+We call these rules DPI, a.k.a. the DefraDB Policy Interface.
+
+## Terminologies
+- 'SourceHub Address' is a `Bech32` address with a specific SourceHub prefix.
+- 'Identity' is a combination of a SourceHub Address and a Key-Pair Signature.
+- 'DPI' means 'DefraDB Policy Interface'.
+- 'Partially-DPI' policy means a policy with at least one DPI-compliant resource.
+- 'Permissioned Collection' means a collection that has a policy on it, like: `@policy(id:".." resource: "..")`
+- 'Permissioned Request' means a request made with a SourceHub Identity.
+
+
+## DAC DPI Rules
+
+To qualify as a DPI-compliant `resource`, the following rules **MUST** be satisfied:
+- The resource **must include** the mandatory `registerer` (`owner`) relation within the `relations` attribute.
+- The resource **must encompass** all the required permissions under the `permissions` attribute.
+- Every required permission must have the required registerer relation (`owner`) in its `expr`.
+- The required registerer relation **must be positioned** as the leading (first) relation in `expr` (see example below).
+- Any relation after the required registerer relation must only be joined by a union set operation (`+`).
+
+For a `Policy` to be DPI-compliant for DAC, all of its `resources` must be DPI compliant.
+To be `Partially-DPI`, at least one of its `resources` must be DPI compliant.
+
+### More Into The Weeds:
+
+All mandatory permissions are:
+- Specified in the `dpi.go` file within the variable `dpiRequiredPermissions`.
+
+The name of the required 'registerer' relation is:
+- Specified in the `dpi.go` file within the variable `requiredRegistererRelationName`.
+
+### DPI Resource Examples:
+- Check out the tests here: [tests/integration/acp/schema/add_dpi](/tests/integration/acp/schema/add_dpi)
+- The tests linked are broken into `accept_*_test.go` and `reject_*_test.go` files.
+- Accepted tests document valid DPIs (as the schema is accepted).
+- Rejected tests document invalid DPIs (as the schema is rejected).
+- There are also some Partially-DPI tests that are both accepted and rejected depending on the resource.
+
+### Required Permission's Expression:
+Even though the following expressions are valid generic policy expressions, they will make a
+DPI-compliant resource lose its DPI status, as these expressions are not in accordance with
+our DPI [rules](#dac-dpi-rules). Assuming these `expr` are under a required permission label:
+- `expr: owner-owner`
+- `expr: owner-reader`
+- `expr: owner&reader`
+- `expr: owner - reader`
+- `expr: ownerMalicious + owner`
+- `expr: ownerMalicious`
+- `expr: owner_new`
+- `expr: reader+owner`
+- `expr: reader-owner`
+- `expr: reader - owner`
+
+Here are some valid expression examples, assuming these `expr` are under a required permission label:
+- `expr: owner`
+- `expr: owner + reader`
+- `expr: owner +reader`
+- `expr: owner+reader`
+
+The sketch below exercises these rules against the expression validator added in `acp/dpi.go`.
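+
+As a quick illustration of the rules above, here is a test-style sketch (written in package `acp`, since the validator is unexported) that runs a few of the listed expressions through `validateDPIExpressionOfRequiredPermission` from `acp/dpi.go`. The test name and case selection are illustrative:
+
+```go
+package acp
+
+import "testing"
+
+// TestDPIExpressionExamples checks a sample of the valid and invalid `expr`
+// examples from the README against the validator defined in dpi.go.
+func TestDPIExpressionExamples(t *testing.T) {
+	cases := []struct {
+		expr  string
+		valid bool
+	}{
+		{"owner", true},
+		{"owner + reader", true},
+		{"owner+reader", true},
+		{"reader+owner", false},   // "owner" must be the leading relation
+		{"owner - reader", false}, // only union (+) may follow "owner"
+		{"ownerMalicious", false}, // invalid character right after "owner"
+	}
+
+	for _, c := range cases {
+		err := validateDPIExpressionOfRequiredPermission(c.expr, "read")
+		if (err == nil) != c.valid {
+			t.Errorf("expr %q: expected valid=%v, got err=%v", c.expr, c.valid, err)
+		}
+	}
+}
+```
+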
+## DAC Usage CLI:
+
+### Adding a Policy:
+
+We have in `examples/dpi_policy/user_dpi_policy.yml`:
+```yaml
+description: A Valid DefraDB Policy Interface (DPI)
+
+actor:
+  name: actor
+
+resources:
+  users:
+    permissions:
+      read:
+        expr: owner + reader
+      write:
+        expr: owner
+
+    relations:
+      owner:
+        types:
+          - actor
+      reader:
+        types:
+          - actor
+```
+
+CLI Command:
+```sh
+defradb client acp policy add -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j -f examples/dpi_policy/user_dpi_policy.yml
+```
+
+Result:
+```json
+{
+  "PolicyID": "24ab8cba6d6f0bcfe4d2712c7d95c09dd1b8076ea5a8896476413fd6c891c18c"
+}
+```
+
+### Add schema, linking to a resource within the policy we added:
+
+We have in `examples/schema/permissioned/users.graphql`:
+```graphql
+type Users @policy(
+  id: "24ab8cba6d6f0bcfe4d2712c7d95c09dd1b8076ea5a8896476413fd6c891c18c",
+  resource: "users"
+) {
+  name: String
+  age: Int
+}
+```
+
+CLI Command:
+```sh
+defradb client schema add -f examples/schema/permissioned/users.graphql
+```
+
+Result:
+```json
+[
+  {
+    "Name": "Users",
+    "ID": 1,
+    "RootID": 1,
+    "SchemaVersionID": "bafkreihhd6bqrjhl5zidwztgxzeseveplv3cj3fwtn3unjkdx7j2vr2vrq",
+    "Sources": [],
+    "Fields": [
+      {
+        "Name": "_docID",
+        "ID": 0
+      },
+      {
+        "Name": "age",
+        "ID": 1
+      },
+      {
+        "Name": "name",
+        "ID": 2
+      }
+    ],
+    "Indexes": [],
+    "Policy": {
+      "ID": "24ab8cba6d6f0bcfe4d2712c7d95c09dd1b8076ea5a8896476413fd6c891c18c",
+      "ResourceName": "users"
+    }
+  }
+]
+```
+
+### Create private documents (with identity)
+
+CLI Command:
+```sh
+defradb client collection create -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name Users '[{ "name": "SecretShahzad" }, { "name": "SecretLone" }]'
+```
+
+### Create public documents (without identity)
+
+CLI Command:
+```sh
+defradb client collection create --name Users '[{ "name": "PublicShahzad" }, { "name": "PublicLone" }]'
+```
+
+### Get all docIDs without an identity (shows only public):
+CLI Command:
+```sh
+defradb client collection docIDs
+```
+
+Result:
+```json
+{
+  "docID": "bae-63ba68c9-78cb-5060-ab03-53ead1ec5b83",
+  "error": ""
+}
+{
+  "docID": "bae-ba315e98-fb37-5225-8a3b-34a1c75cba9e",
+  "error": ""
+}
+```
+
+
+### Get all docIDs with an identity (shows public and owned documents):
+```sh
+defradb client collection docIDs -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j
+```
+
+Result:
+```json
+{
+  "docID": "bae-63ba68c9-78cb-5060-ab03-53ead1ec5b83",
+  "error": ""
+}
+{
+  "docID": "bae-a5830219-b8e7-5791-9836-2e494816fc0a",
+  "error": ""
+}
+{
+  "docID": "bae-ba315e98-fb37-5225-8a3b-34a1c75cba9e",
+  "error": ""
+}
+{
+  "docID": "bae-eafad571-e40c-55a7-bc41-3cf7d61ee891",
+  "error": ""
+}
+```
+
+### Access the private document (including field names):
+CLI Command:
+```sh
+defradb client collection get -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name Users "bae-a5830219-b8e7-5791-9836-2e494816fc0a"
+```
+
+Result:
+```json
+{
+  "_docID": "bae-a5830219-b8e7-5791-9836-2e494816fc0a",
+  "name": "SecretShahzad"
+}
+```
+
+### Accessing the private document without an identity:
+CLI Command:
+```sh
+defradb client collection get --name Users "bae-a5830219-b8e7-5791-9836-2e494816fc0a"
+```
+
+Error:
+```
+  Error: document not found or not authorized to access
+```
+
+### Accessing the private document with the wrong identity:
+CLI Command:
+```sh
+defradb client collection get -i cosmos1x25hhksxhu86r45hqwk28dd70qzux3262hdrll --name Users "bae-a5830219-b8e7-5791-9836-2e494816fc0a"
+```
+
+Error:
+```
+  Error: document not found or not authorized to access
+```
+
+### Update private document:
+CLI Command:
+```sh
+defradb client collection update -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name Users --docID "bae-a5830219-b8e7-5791-9836-2e494816fc0a" --updater '{ "name": "SecretUpdatedShahzad" }'
+```
+
+Result:
+```json
+{
+  "Count": 1,
+  "DocIDs": [
+    "bae-a5830219-b8e7-5791-9836-2e494816fc0a"
+  ]
+}
+```
+
+#### Check if it actually got updated:
+CLI Command:
+```sh
+defradb client collection get -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name Users "bae-a5830219-b8e7-5791-9836-2e494816fc0a"
+```
+
+Result:
+```json
+{
+  "_docID": "bae-a5830219-b8e7-5791-9836-2e494816fc0a",
+  "name": "SecretUpdatedShahzad"
+}
+```
+
+### Update With Filter example (coming soon)
+
+### Delete private document:
+CLI Command:
+```sh
+defradb client collection delete -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name Users --docID "bae-a5830219-b8e7-5791-9836-2e494816fc0a"
+```
+
+Result:
+```json
+{
+  "Count": 1,
+  "DocIDs": [
+    "bae-a5830219-b8e7-5791-9836-2e494816fc0a"
+  ]
+}
+```
+
+#### Check if it actually got deleted:
+CLI Command:
+```sh
+defradb client collection get -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name Users "bae-a5830219-b8e7-5791-9836-2e494816fc0a"
+```
+
+Error:
+```
+  Error: document not found or not authorized to access
+```
+
+### Delete With Filter example (coming soon)
+
+### Typejoin example (coming soon)
+
+### View example (coming soon)
+
+### P2P example (coming soon)
+
+### Backup / Import example (coming soon)
+
+### Secondary Indexes example (coming soon)
+
+### Execute Explain example (coming soon)
+
+
+## DAC Usage HTTP:
+HTTP requests work similarly to their CLI counterparts; the main difference is that the identity is specified in the Auth header, like so: `Authorization: Basic `.
+
+Note: The `Basic` label will change to `Bearer ` after JWS Authentication Tokens are supported.
+
+## _AAC DPI Rules (coming soon)_
+## _AAC Usage: (coming soon)_
+
+## _FAC DPI Rules (coming soon)_
+## _FAC Usage: (coming soon)_
+
+## Warning / Caveats
+The following features currently don't work with ACP; they are being actively worked on:
+- [P2P: Adding a replicator with permissioned collection](https://github.com/sourcenetwork/defradb/issues/2366) +- [P2P: Subscription to a permissioned collection](https://github.com/sourcenetwork/defradb/issues/2366) +- [Adding Secondary Indexes](https://github.com/sourcenetwork/defradb/issues/2365) +- [Backing/Restoring Private Documents](https://github.com/sourcenetwork/defradb/issues/2430) + +The following features may have undefined/unstable behavior until they are properly tested: +- [Views](https://github.com/sourcenetwork/defradb/issues/2018) +- [Average Operations](https://github.com/sourcenetwork/defradb/issues/2475) +- [Count Operations](https://github.com/sourcenetwork/defradb/issues/2474) +- [Group Operations](https://github.com/sourcenetwork/defradb/issues/2473) +- [Limit Operations](https://github.com/sourcenetwork/defradb/issues/2472) +- [Order Operations](https://github.com/sourcenetwork/defradb/issues/2471) +- [Sum Operations](https://github.com/sourcenetwork/defradb/issues/2470) +- [Dag/Commit Operations](https://github.com/sourcenetwork/defradb/issues/2469) +- [Delete With Filter Operations](https://github.com/sourcenetwork/defradb/issues/2468) +- [Update With Filter Operations](https://github.com/sourcenetwork/defradb/issues/2467) +- [Type Join Many Operations](https://github.com/sourcenetwork/defradb/issues/2466) +- [Type Join One Operations](https://github.com/sourcenetwork/defradb/issues/2466) +- [Parallel Operations](https://github.com/sourcenetwork/defradb/issues/2465) +- [Execute Explain](https://github.com/sourcenetwork/defradb/issues/2464) diff --git a/acp/acp.go b/acp/acp.go new file mode 100644 index 0000000000..af99bcb86f --- /dev/null +++ b/acp/acp.go @@ -0,0 +1,100 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package acp + +import ( + "context" + + "github.com/sourcenetwork/immutable" + + "github.com/sourcenetwork/corelog" +) + +var ( + log = corelog.NewLogger("acp") + + // NoACP is an empty ACP, this is used to disable access control. + NoACP = immutable.None[ACP]() +) + +// ACP is the interface to all types of access control that might exist. +type ACP interface { + // Init initializes the acp, with an absolute path. The provided path indicates where the + // persistent data will be stored for acp. + // + // If the path is empty then acp will run in memory. + Init(ctx context.Context, path string) + + // Start starts the acp, using the initialized path. Will recover acp state + // from a previous run if under the same path. + // + // If the path is empty then acp will run in memory. + Start(ctx context.Context) error + + // Close closes the resources in use by acp. + Close() error + + // AddPolicy attempts to add the given policy. Detects the format of the policy automatically + // by assuming YAML format if JSON validation fails. Upon success a policyID is returned, + // otherwise returns error. + // + // A policy can not be added without a creator identity (sourcehub address). 
+	AddPolicy(ctx context.Context, creatorID string, policy string) (string, error)
+
+	// ValidateResourceExistsOnValidDPI performs DPI validation of the resource (matching resource name)
+	// that is on the policy (matching policyID), and returns an error upon validation failure.
+	//
+	// Learn more about the DefraDB Policy Interface [DPI](/acp/README.md)
+	ValidateResourceExistsOnValidDPI(
+		ctx context.Context,
+		policyID string,
+		resourceName string,
+	) error
+
+	// RegisterDocObject registers the document (object) to have access control.
+	// No error is returned upon successful registering of a document.
+	//
+	// Note(s):
+	// - This function does not check the collection to see if the document actually exists.
+	// - Some documents might be created without an identity signature so they would have public access.
+	// - actorID here is the identity of the actor registering the document object.
+	RegisterDocObject(
+		ctx context.Context,
+		actorID string,
+		policyID string,
+		resourceName string,
+		docID string,
+	) error
+
+	// IsDocRegistered returns true if the document was found to be registered, otherwise returns false.
+	// If the check failed, then an error and false will be returned.
+	IsDocRegistered(
+		ctx context.Context,
+		policyID string,
+		resourceName string,
+		docID string,
+	) (bool, error)
+
+	// CheckDocAccess returns true if the check was successful and the request has access to the document. If
+	// the check was successful but the request does not have access to the document, then returns false.
+	// Otherwise, if the check failed, an error is returned (and the boolean result should not be used).
+	//
+	// Note(s):
+	// - permission here is a valid DPI permission we are checking for ("read" or "write").
+	CheckDocAccess(
+		ctx context.Context,
+		permission DPIPermission,
+		actorID string,
+		policyID string,
+		resourceName string,
+		docID string,
+	) (bool, error)
+}
diff --git a/acp/acp_local.go b/acp/acp_local.go
new file mode 100644
index 0000000000..e569efd5d0
--- /dev/null
+++ b/acp/acp_local.go
@@ -0,0 +1,310 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package acp
+
+import (
+	"context"
+
+	protoTypes "github.com/cosmos/gogoproto/types"
+	"github.com/sourcenetwork/corelog"
+	"github.com/sourcenetwork/immutable"
+	"github.com/sourcenetwork/sourcehub/x/acp/embedded"
+	"github.com/sourcenetwork/sourcehub/x/acp/types"
+	"github.com/valyala/fastjson"
+
+	"github.com/sourcenetwork/defradb/errors"
+)
+
+var (
+	_ ACP = (*ACPLocal)(nil)
+)
+
+// ACPLocal represents a local acp implementation that makes no remote calls.
+type ACPLocal struct {
+	pathToStore immutable.Option[string]
+	localACP    *embedded.LocalACP
+}
+
+func (l *ACPLocal) Init(ctx context.Context, path string) {
+	if path == "" {
+		l.pathToStore = immutable.None[string]()
+	} else {
+		l.pathToStore = immutable.Some(path)
+	}
+}
+
+func (l *ACPLocal) Start(ctx context.Context) error {
+	var localACP embedded.LocalACP
+	var err error
+
+	if !l.pathToStore.HasValue() { // Use a non-persistent, i.e. in-memory, store.
+		localACP, err = embedded.NewLocalACP(
+			embedded.WithInMemStore(),
+		)
+
+		if err != nil {
+			return NewErrInitializationOfACPFailed(err, "Local", "in-memory")
+		}
+	} else { // Use persistent storage.
+		acpStorePath := l.pathToStore.Value() + "/" + embedded.DefaultDataDir
+		localACP, err = embedded.NewLocalACP(
+			embedded.WithPersistentStorage(acpStorePath),
+		)
+		if err != nil {
+			return NewErrInitializationOfACPFailed(err, "Local", l.pathToStore.Value())
+		}
+	}
+
+	l.localACP = &localACP
+	return nil
+}
+
+func (l *ACPLocal) Close() error {
+	return l.localACP.Close()
+}
+
+func (l *ACPLocal) AddPolicy(
+	ctx context.Context,
+	creatorID string,
+	policy string,
+) (string, error) {
+	// Having a creator identity is a MUST requirement for adding a policy.
+	if creatorID == "" {
+		return "", ErrPolicyCreatorMustNotBeEmpty
+	}
+
+	if policy == "" {
+		return "", ErrPolicyDataMustNotBeEmpty
+	}
+
+	// Assume policy is in YAML format by default.
+	policyMarshalType := types.PolicyMarshalingType_SHORT_YAML
+	if isJSON := fastjson.Validate(policy) == nil; isJSON { // Detect JSON format.
+		policyMarshalType = types.PolicyMarshalingType_SHORT_JSON
+	}
+
+	createPolicy := types.MsgCreatePolicy{
+		Creator:      creatorID,
+		Policy:       policy,
+		MarshalType:  policyMarshalType,
+		CreationTime: protoTypes.TimestampNow(),
+	}
+
+	createPolicyResponse, err := l.localACP.GetMsgService().CreatePolicy(
+		l.localACP.GetCtx(),
+		&createPolicy,
+	)
+
+	if err != nil {
+		return "", NewErrFailedToAddPolicyWithACP(err, "Local", creatorID)
+	}
+
+	policyID := createPolicyResponse.Policy.Id
+	log.InfoContext(ctx, "Created Policy", corelog.Any("PolicyID", policyID))
+
+	return policyID, nil
+}
+
+func (l *ACPLocal) ValidateResourceExistsOnValidDPI(
+	ctx context.Context,
+	policyID string,
+	resourceName string,
+) error {
+	if policyID == "" && resourceName == "" {
+		return ErrNoPolicyArgs
+	}
+
+	if policyID == "" {
+		return ErrPolicyIDMustNotBeEmpty
+	}
+
+	if resourceName == "" {
+		return ErrResourceNameMustNotBeEmpty
+	}
+
+	queryPolicyRequest := types.QueryPolicyRequest{Id: policyID}
+	queryPolicyResponse, err := l.localACP.GetQueryService().Policy(
+		l.localACP.GetCtx(),
+		&queryPolicyRequest,
+	)
+
+	if err != nil {
+		if errors.Is(err, types.ErrPolicyNotFound) {
+			return newErrPolicyDoesNotExistWithACP(err, policyID)
+		} else {
+			return newErrPolicyValidationFailedWithACP(err, policyID)
+		}
+	}
+
+	// So far we have validated that the policy exists; now let's validate that the resource exists.
+	resourceResponse := queryPolicyResponse.Policy.GetResourceByName(resourceName)
+	if resourceResponse == nil {
+		return newErrResourceDoesNotExistOnTargetPolicy(resourceName, policyID)
+	}
+
+	// Now that we have validated that the policyID exists and contains a corresponding
+	// resource with the matching name, validate that all required permissions
+	// for DPI actually exist on the target resource.
+	for _, requiredPermission := range dpiRequiredPermissions {
+		permissionResponse := resourceResponse.GetPermissionByName(requiredPermission)
+		if permissionResponse == nil {
+			return newErrResourceIsMissingRequiredPermission(
+				resourceName,
+				requiredPermission,
+				policyID,
+			)
+		}
+
+		// Now we need to ensure that the "owner" relation has access to all the required
+		// permissions for DPI. This is important because even if the policy has the required
+		// permissions under the resource, it's possible that those permissions are not granted
+		// to the "owner" relation; this check helps users not shoot themselves in the foot.
+ // TODO-ACP: Better validation, once sourcehub implements meta-policies. + // Issue: https://github.com/sourcenetwork/defradb/issues/2359 + if err := validateDPIExpressionOfRequiredPermission( + permissionResponse.Expression, + requiredPermission, + ); err != nil { + return err + } + } + + return nil +} + +func (l *ACPLocal) RegisterDocObject( + ctx context.Context, + actorID string, + policyID string, + resourceName string, + docID string, +) error { + registerDoc := types.MsgRegisterObject{ + Creator: actorID, + PolicyId: policyID, + Object: types.NewObject(resourceName, docID), + CreationTime: protoTypes.TimestampNow(), + } + + registerDocResponse, err := l.localACP.GetMsgService().RegisterObject( + l.localACP.GetCtx(), + ®isterDoc, + ) + + if err != nil { + return NewErrFailedToRegisterDocWithACP(err, "Local", policyID, actorID, resourceName, docID) + } + + switch registerDocResponse.Result { + case types.RegistrationResult_NoOp: + return ErrObjectDidNotRegister + + case types.RegistrationResult_Registered: + log.InfoContext( + ctx, + "Document registered with local acp", + corelog.Any("PolicyID", policyID), + corelog.Any("Creator", actorID), + corelog.Any("Resource", resourceName), + corelog.Any("DocID", docID), + ) + return nil + + case types.RegistrationResult_Unarchived: + log.InfoContext( + ctx, + "Document re-registered (unarchived object) with local acp", + corelog.Any("PolicyID", policyID), + corelog.Any("Creator", actorID), + corelog.Any("Resource", resourceName), + corelog.Any("DocID", docID), + ) + return nil + } + + return ErrObjectDidNotRegister +} + +func (l *ACPLocal) IsDocRegistered( + ctx context.Context, + policyID string, + resourceName string, + docID string, +) (bool, error) { + queryObjectOwner := types.QueryObjectOwnerRequest{ + PolicyId: policyID, + Object: types.NewObject(resourceName, docID), + } + + queryObjectOwnerResponse, err := l.localACP.GetQueryService().ObjectOwner( + l.localACP.GetCtx(), + &queryObjectOwner, + ) + if err != nil { + return false, NewErrFailedToCheckIfDocIsRegisteredWithACP(err, "Local", policyID, resourceName, docID) + } + + return queryObjectOwnerResponse.IsRegistered, nil +} + +func (l *ACPLocal) CheckDocAccess( + ctx context.Context, + permission DPIPermission, + actorID string, + policyID string, + resourceName string, + docID string, +) (bool, error) { + checkDoc := types.QueryVerifyAccessRequestRequest{ + PolicyId: policyID, + AccessRequest: &types.AccessRequest{ + Operations: []*types.Operation{ + { + Object: types.NewObject(resourceName, docID), + Permission: permission.String(), + }, + }, + Actor: &types.Actor{ + Id: actorID, + }, + }, + } + + checkDocResponse, err := l.localACP.GetQueryService().VerifyAccessRequest( + l.localACP.GetCtx(), + &checkDoc, + ) + if err != nil { + return false, NewErrFailedToVerifyDocAccessWithACP(err, "Local", policyID, actorID, resourceName, docID) + } + + if checkDocResponse.Valid { + log.InfoContext( + ctx, + "Document accessible", + corelog.Any("PolicyID", policyID), + corelog.Any("ActorID", actorID), + corelog.Any("Resource", resourceName), + corelog.Any("DocID", docID), + ) + return true, nil + } else { + log.InfoContext( + ctx, + "Document inaccessible", + corelog.Any("PolicyID", policyID), + corelog.Any("ActorID", actorID), + corelog.Any("Resource", resourceName), + corelog.Any("DocID", docID), + ) + return false, nil + } +} diff --git a/acp/acp_local_test.go b/acp/acp_local_test.go new file mode 100644 index 0000000000..9abdcb04d1 --- /dev/null +++ b/acp/acp_local_test.go @@ -0,0 +1,654 
@@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package acp + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" +) + +var identity1 = "cosmos1zzg43wdrhmmk89z3pmejwete2kkd4a3vn7w969" +var identity2 = "cosmos1x25hhksxhu86r45hqwk28dd70qzux3262hdrll" + +var validPolicyID string = "4f13c5084c3d0e1e5c5db702fceef84c3b6ab948949ca8e27fcaad3fb8bc39f4" +var validPolicy string = ` +description: a policy + +actor: + name: actor + +resources: + users: + permissions: + write: + expr: owner + read: + expr: owner + reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + ` + +func Test_LocalACP_InMemory_StartAndClose_NoError(t *testing.T) { + ctx := context.Background() + var localACP ACPLocal + + localACP.Init(ctx, "") + err := localACP.Start(ctx) + + require.Nil(t, err) + + err = localACP.Close() + require.Nil(t, err) +} + +func Test_LocalACP_PersistentMemory_StartAndClose_NoError(t *testing.T) { + acpPath := t.TempDir() + require.NotEqual(t, "", acpPath) + + ctx := context.Background() + var localACP ACPLocal + + localACP.Init(ctx, acpPath) + err := localACP.Start(ctx) + require.Nil(t, err) + + err = localACP.Close() + require.Nil(t, err) +} + +func Test_LocalACP_InMemory_AddPolicy_CanCreateTwice(t *testing.T) { + ctx := context.Background() + var localACP ACPLocal + + localACP.Init(ctx, "") + errStart := localACP.Start(ctx) + require.Nil(t, errStart) + + policyID, errAddPolicy := localACP.AddPolicy( + ctx, + identity1, + validPolicy, + ) + require.Nil(t, errAddPolicy) + + require.Equal( + t, + validPolicyID, + policyID, + ) + + errClose := localACP.Close() + require.Nil(t, errClose) + + // Since nothing is persisted should allow adding same policy again. + + localACP.Init(ctx, "") + errStart = localACP.Start(ctx) + require.Nil(t, errStart) + + policyID, errAddPolicy = localACP.AddPolicy( + ctx, + identity1, + validPolicy, + ) + require.Nil(t, errAddPolicy) + require.Equal( + t, + validPolicyID, + policyID, + ) + + errClose = localACP.Close() + require.Nil(t, errClose) +} + +func Test_LocalACP_PersistentMemory_AddPolicy_CanNotCreateTwice(t *testing.T) { + acpPath := t.TempDir() + require.NotEqual(t, "", acpPath) + + ctx := context.Background() + var localACP ACPLocal + + localACP.Init(ctx, acpPath) + errStart := localACP.Start(ctx) + require.Nil(t, errStart) + + policyID, errAddPolicy := localACP.AddPolicy( + ctx, + identity1, + validPolicy, + ) + require.Nil(t, errAddPolicy) + require.Equal( + t, + validPolicyID, + policyID, + ) + + errClose := localACP.Close() + require.Nil(t, errClose) + + // The above policy should remain persisted on restarting ACP. + + localACP.Init(ctx, acpPath) + errStart = localACP.Start(ctx) + require.Nil(t, errStart) + + // Should not allow us to create the same policy again as it exists already. 
+ _, errAddPolicy = localACP.AddPolicy( + ctx, + identity1, + validPolicy, + ) + require.Error(t, errAddPolicy) + require.ErrorIs(t, errAddPolicy, ErrFailedToAddPolicyWithACP) + + errClose = localACP.Close() + require.Nil(t, errClose) +} + +func Test_LocalACP_InMemory_ValidateResourseExistsOrNot_ErrIfDoesntExist(t *testing.T) { + ctx := context.Background() + var localACP ACPLocal + + localACP.Init(ctx, "") + errStart := localACP.Start(ctx) + require.Nil(t, errStart) + + policyID, errAddPolicy := localACP.AddPolicy( + ctx, + identity1, + validPolicy, + ) + require.Nil(t, errAddPolicy) + require.Equal( + t, + validPolicyID, + policyID, + ) + + errValidateResourceExists := localACP.ValidateResourceExistsOnValidDPI( + ctx, + validPolicyID, + "users", + ) + require.Nil(t, errValidateResourceExists) + + errValidateResourceExists = localACP.ValidateResourceExistsOnValidDPI( + ctx, + validPolicyID, + "resourceDoesNotExist", + ) + require.Error(t, errValidateResourceExists) + require.ErrorIs(t, errValidateResourceExists, ErrResourceDoesNotExistOnTargetPolicy) + + errValidateResourceExists = localACP.ValidateResourceExistsOnValidDPI( + ctx, + "invalidPolicyID", + "resourceDoesNotExist", + ) + require.Error(t, errValidateResourceExists) + require.ErrorIs(t, errValidateResourceExists, ErrPolicyDoesNotExistWithACP) + + errClose := localACP.Close() + require.Nil(t, errClose) +} + +func Test_LocalACP_PersistentMemory_ValidateResourseExistsOrNot_ErrIfDoesntExist(t *testing.T) { + acpPath := t.TempDir() + require.NotEqual(t, "", acpPath) + + ctx := context.Background() + var localACP ACPLocal + + localACP.Init(ctx, acpPath) + errStart := localACP.Start(ctx) + require.Nil(t, errStart) + + policyID, errAddPolicy := localACP.AddPolicy( + ctx, + identity1, + validPolicy, + ) + require.Nil(t, errAddPolicy) + require.Equal( + t, + validPolicyID, + policyID, + ) + + errValidateResourceExists := localACP.ValidateResourceExistsOnValidDPI( + ctx, + validPolicyID, + "users", + ) + require.Nil(t, errValidateResourceExists) + + // Resource should still exist even after a restart. + errClose := localACP.Close() + require.Nil(t, errClose) + + localACP.Init(ctx, acpPath) + errStart = localACP.Start(ctx) + require.Nil(t, errStart) + + // Do the same check after restart. + errValidateResourceExists = localACP.ValidateResourceExistsOnValidDPI( + ctx, + validPolicyID, + "users", + ) + require.Nil(t, errValidateResourceExists) + + errValidateResourceExists = localACP.ValidateResourceExistsOnValidDPI( + ctx, + validPolicyID, + "resourceDoesNotExist", + ) + require.Error(t, errValidateResourceExists) + require.ErrorIs(t, errValidateResourceExists, ErrResourceDoesNotExistOnTargetPolicy) + + errValidateResourceExists = localACP.ValidateResourceExistsOnValidDPI( + ctx, + "invalidPolicyID", + "resourceDoesNotExist", + ) + require.Error(t, errValidateResourceExists) + require.ErrorIs(t, errValidateResourceExists, ErrPolicyDoesNotExistWithACP) + + errClose = localACP.Close() + require.Nil(t, errClose) +} + +func Test_LocalACP_InMemory_IsDocRegistered_TrueIfRegisteredFalseIfNotAndErrorOtherwise(t *testing.T) { + ctx := context.Background() + var localACP ACPLocal + + localACP.Init(ctx, "") + errStart := localACP.Start(ctx) + require.Nil(t, errStart) + + policyID, errAddPolicy := localACP.AddPolicy( + ctx, + identity1, + validPolicy, + ) + require.Nil(t, errAddPolicy) + require.Equal( + t, + validPolicyID, + policyID, + ) + + // Invalid empty doc and empty resource can't be registered. 
+ errRegisterDoc := localACP.RegisterDocObject( + ctx, + identity1, + validPolicyID, + "", + "", + ) + require.Error(t, errRegisterDoc) + require.ErrorIs(t, errRegisterDoc, ErrFailedToRegisterDocWithACP) + + // Check if an invalid empty doc and empty resource is registered. + isDocRegistered, errDocRegistered := localACP.IsDocRegistered( + ctx, + validPolicyID, + "", + "", + ) + require.Error(t, errDocRegistered) + require.ErrorIs(t, errDocRegistered, ErrFailedToCheckIfDocIsRegisteredWithACP) + require.False(t, isDocRegistered) + + // No documents are registered right now so return false. + isDocRegistered, errDocRegistered = localACP.IsDocRegistered( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errDocRegistered) + require.False(t, isDocRegistered) + + // Register a document. + errRegisterDoc = localACP.RegisterDocObject( + ctx, + identity1, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errRegisterDoc) + + // Now it should be registered. + isDocRegistered, errDocRegistered = localACP.IsDocRegistered( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + ) + + require.Nil(t, errDocRegistered) + require.True(t, isDocRegistered) + + errClose := localACP.Close() + require.Nil(t, errClose) +} + +func Test_LocalACP_PersistentMemory_IsDocRegistered_TrueIfRegisteredFalseIfNotAndErrorOtherwise(t *testing.T) { + acpPath := t.TempDir() + require.NotEqual(t, "", acpPath) + + ctx := context.Background() + var localACP ACPLocal + + localACP.Init(ctx, acpPath) + errStart := localACP.Start(ctx) + require.Nil(t, errStart) + + policyID, errAddPolicy := localACP.AddPolicy( + ctx, + identity1, + validPolicy, + ) + require.Nil(t, errAddPolicy) + require.Equal( + t, + validPolicyID, + policyID, + ) + + // Invalid empty doc and empty resource can't be registered. + errRegisterDoc := localACP.RegisterDocObject( + ctx, + identity1, + validPolicyID, + "", + "", + ) + require.Error(t, errRegisterDoc) + require.ErrorIs(t, errRegisterDoc, ErrFailedToRegisterDocWithACP) + + // Check if an invalid empty doc and empty resource is registered. + isDocRegistered, errDocRegistered := localACP.IsDocRegistered( + ctx, + validPolicyID, + "", + "", + ) + require.Error(t, errDocRegistered) + require.ErrorIs(t, errDocRegistered, ErrFailedToCheckIfDocIsRegisteredWithACP) + require.False(t, isDocRegistered) + + // No documents are registered right now so return false. + isDocRegistered, errDocRegistered = localACP.IsDocRegistered( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errDocRegistered) + require.False(t, isDocRegistered) + + // Register a document. + errRegisterDoc = localACP.RegisterDocObject( + ctx, + identity1, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errRegisterDoc) + + // Now it should be registered. + isDocRegistered, errDocRegistered = localACP.IsDocRegistered( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + ) + + require.Nil(t, errDocRegistered) + require.True(t, isDocRegistered) + + // Should stay registered even after a restart. + errClose := localACP.Close() + require.Nil(t, errClose) + + localACP.Init(ctx, acpPath) + errStart = localACP.Start(ctx) + require.Nil(t, errStart) + + // Check after restart if it is still registered. 
+ isDocRegistered, errDocRegistered = localACP.IsDocRegistered( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + ) + + require.Nil(t, errDocRegistered) + require.True(t, isDocRegistered) + + errClose = localACP.Close() + require.Nil(t, errClose) +} + +func Test_LocalACP_InMemory_CheckDocAccess_TrueIfHaveAccessFalseIfNotErrorOtherwise(t *testing.T) { + ctx := context.Background() + var localACP ACPLocal + + localACP.Init(ctx, "") + errStart := localACP.Start(ctx) + require.Nil(t, errStart) + + policyID, errAddPolicy := localACP.AddPolicy( + ctx, + identity1, + validPolicy, + ) + require.Nil(t, errAddPolicy) + require.Equal( + t, + validPolicyID, + policyID, + ) + + // Invalid empty arguments such that we can't check doc access. + hasAccess, errCheckDocAccess := localACP.CheckDocAccess( + ctx, + ReadPermission, + identity1, + validPolicyID, + "", + "", + ) + require.Error(t, errCheckDocAccess) + require.ErrorIs(t, errCheckDocAccess, ErrFailedToVerifyDocAccessWithACP) + require.False(t, hasAccess) + + // Check document accesss for a document that does not exist. + hasAccess, errCheckDocAccess = localACP.CheckDocAccess( + ctx, + ReadPermission, + identity1, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errCheckDocAccess) + require.False(t, hasAccess) + + // Register a document. + errRegisterDoc := localACP.RegisterDocObject( + ctx, + identity1, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errRegisterDoc) + + // Now check using correct identity if it has access. + hasAccess, errCheckDocAccess = localACP.CheckDocAccess( + ctx, + ReadPermission, + identity1, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errCheckDocAccess) + require.True(t, hasAccess) + + // Now check using wrong identity, it should not have access. + hasAccess, errCheckDocAccess = localACP.CheckDocAccess( + ctx, + ReadPermission, + identity2, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errCheckDocAccess) + require.False(t, hasAccess) + + errClose := localACP.Close() + require.Nil(t, errClose) +} + +func Test_LocalACP_PersistentMemory_CheckDocAccess_TrueIfHaveAccessFalseIfNotErrorOtherwise(t *testing.T) { + acpPath := t.TempDir() + require.NotEqual(t, "", acpPath) + + ctx := context.Background() + var localACP ACPLocal + + localACP.Init(ctx, acpPath) + errStart := localACP.Start(ctx) + require.Nil(t, errStart) + + policyID, errAddPolicy := localACP.AddPolicy( + ctx, + identity1, + validPolicy, + ) + require.Nil(t, errAddPolicy) + require.Equal( + t, + validPolicyID, + policyID, + ) + + // Invalid empty arguments such that we can't check doc access. + hasAccess, errCheckDocAccess := localACP.CheckDocAccess( + ctx, + ReadPermission, + identity1, + validPolicyID, + "", + "", + ) + require.Error(t, errCheckDocAccess) + require.ErrorIs(t, errCheckDocAccess, ErrFailedToVerifyDocAccessWithACP) + require.False(t, hasAccess) + + // Check document accesss for a document that does not exist. + hasAccess, errCheckDocAccess = localACP.CheckDocAccess( + ctx, + ReadPermission, + identity1, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errCheckDocAccess) + require.False(t, hasAccess) + + // Register a document. + errRegisterDoc := localACP.RegisterDocObject( + ctx, + identity1, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errRegisterDoc) + + // Now check using correct identity if it has access. 
+ hasAccess, errCheckDocAccess = localACP.CheckDocAccess( + ctx, + ReadPermission, + identity1, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errCheckDocAccess) + require.True(t, hasAccess) + + // Now check using wrong identity, it should not have access. + hasAccess, errCheckDocAccess = localACP.CheckDocAccess( + ctx, + ReadPermission, + identity2, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errCheckDocAccess) + require.False(t, hasAccess) + + // identities should continue having their correct behaviour and access even after a restart. + errClose := localACP.Close() + require.Nil(t, errClose) + + localACP.Init(ctx, acpPath) + errStart = localACP.Start(ctx) + require.Nil(t, errStart) + + // Now check again after the restart using correct identity if it still has access. + hasAccess, errCheckDocAccess = localACP.CheckDocAccess( + ctx, + ReadPermission, + identity1, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errCheckDocAccess) + require.True(t, hasAccess) + + // Now check again after restart using wrong identity, it should continue to not have access. + hasAccess, errCheckDocAccess = localACP.CheckDocAccess( + ctx, + ReadPermission, + identity2, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errCheckDocAccess) + require.False(t, hasAccess) + + errClose = localACP.Close() + require.Nil(t, errClose) +} diff --git a/acp/doc.go b/acp/doc.go new file mode 100644 index 0000000000..3fd60dd147 --- /dev/null +++ b/acp/doc.go @@ -0,0 +1,17 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +/* +Package acp utilizes the sourcehub acp module to bring the functionality +to defradb, this package also helps avoid the leakage of direct sourcehub +references through out the code base, and eases in swapping between local +use case and a more global on sourcehub use case. +*/ +package acp diff --git a/acp/dpi.go b/acp/dpi.go new file mode 100644 index 0000000000..85da972131 --- /dev/null +++ b/acp/dpi.go @@ -0,0 +1,73 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package acp + +import ( + "strings" +) + +type DPIPermission int + +// Valid DefraDB Policy Interface Permission Type. +const ( + ReadPermission DPIPermission = iota + WritePermission +) + +// List of all valid DPI permissions, the order of permissions in this list must match +// the above defined ordering such that iota matches the index position within the list. +var dpiRequiredPermissions = []string{ + "read", + "write", +} + +func (dpiPermission DPIPermission) String() string { + return dpiRequiredPermissions[dpiPermission] +} + +const requiredRegistererRelationName string = "owner" + +// validateDPIExpressionOfRequiredPermission validates that the expression under the +// permission is valid. 
+// permission is valid. Moreover, DPI requires that for all required permissions the
+// expression start with "owner", followed only by whitespace or a `+` and then the
+// rest of the expression. This is important because even if the policy has the
+// required permissions under the resource, it's still possible that those permissions
+// are not granted to the "owner" relation. This validation helps users not shoot
+// themselves in the foot.
+//
+// Learn more about the DefraDB Policy Interface in the [ACP README](/acp/README.md),
+// which contains more detailed valid and invalid `expr` (expression) examples.
+func validateDPIExpressionOfRequiredPermission(expression string, requiredPermission string) error {
+ exprNoSpace := strings.ReplaceAll(expression, " ", "")
+
+ if !strings.HasPrefix(exprNoSpace, requiredRegistererRelationName) {
+ return newErrExprOfRequiredPermissionMustStartWithRelation(
+ requiredPermission,
+ requiredRegistererRelationName,
+ )
+ }
+
+ restOfTheExpr := exprNoSpace[len(requiredRegistererRelationName):]
+ if len(restOfTheExpr) != 0 {
+ c := restOfTheExpr[0]
+ // The first non-space character after the required relation name MUST be a `+`.
+ // We enforce this here because other set operations are not applied to the
+ // registerer relation anyway.
+ if c != '+' {
+ return newErrExprOfRequiredPermissionHasInvalidChar(
+ requiredPermission,
+ requiredRegistererRelationName,
+ c,
+ )
+ }
+ }
+
+ return nil
+}
diff --git a/acp/errors.go b/acp/errors.go
new file mode 100644
index 0000000000..307b32f5ad
--- /dev/null
+++ b/acp/errors.go
@@ -0,0 +1,207 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
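To make the rule above concrete, here is a minimal sketch (illustrative only, not part of this patch) of which expressions validateDPIExpressionOfRequiredPermission accepts and rejects, written as it might appear in a test within package acp:

    package acp

    import (
    	"testing"

    	"github.com/stretchr/testify/require"
    )

    func TestValidateDPIExpressionSketch(t *testing.T) {
    	// Accepted: the expression starts with the registerer relation "owner",
    	// either alone or followed by `+` (spaces are ignored).
    	require.NoError(t, validateDPIExpressionOfRequiredPermission("owner", "read"))
    	require.NoError(t, validateDPIExpressionOfRequiredPermission("owner + reader", "read"))

    	// Rejected: does not start with "owner", even though owners might still
    	// be granted the permission by this expression.
    	require.Error(t, validateDPIExpressionOfRequiredPermission("reader + owner", "read"))

    	// Rejected: only `+` may follow "owner"; other set operations are not
    	// applied to the registerer relation.
    	require.Error(t, validateDPIExpressionOfRequiredPermission("owner - reader", "read"))
    }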
+ +package acp + +import ( + "github.com/sourcenetwork/defradb/errors" +) + +const ( + errInitializationOfACPFailed = "initialization of acp failed" + errStartingACPInEmptyPath = "starting acp in an empty path" + errFailedToAddPolicyWithACP = "failed to add policy with acp" + errFailedToRegisterDocWithACP = "failed to register document with acp" + errFailedToCheckIfDocIsRegisteredWithACP = "failed to check if doc is registered with acp" + errFailedToVerifyDocAccessWithACP = "failed to verify doc access with acp" + + errObjectDidNotRegister = "no-op while registering object (already exists or error) with acp" + errNoPolicyArgs = "missing policy arguments, must have both id and resource" + + errPolicyIDMustNotBeEmpty = "policyID must not be empty" + errPolicyDoesNotExistWithACP = "policyID specified does not exist with acp" + errPolicyValidationFailedWithACP = "policyID validation through acp failed" + + errResourceNameMustNotBeEmpty = "resource name must not be empty" + errResourceDoesNotExistOnTargetPolicy = "resource does not exist on the specified policy" + errResourceIsMissingRequiredPermission = "resource is missing required permission on policy" + + errExprOfRequiredPermMustStartWithRelation = "expr of required permission must start with required relation" + errExprOfRequiredPermHasInvalidChar = "expr of required permission has invalid character after relation" +) + +var ( + ErrInitializationOfACPFailed = errors.New(errInitializationOfACPFailed) + ErrFailedToAddPolicyWithACP = errors.New(errFailedToAddPolicyWithACP) + ErrFailedToRegisterDocWithACP = errors.New(errFailedToRegisterDocWithACP) + ErrFailedToCheckIfDocIsRegisteredWithACP = errors.New(errFailedToCheckIfDocIsRegisteredWithACP) + ErrFailedToVerifyDocAccessWithACP = errors.New(errFailedToVerifyDocAccessWithACP) + ErrPolicyDoesNotExistWithACP = errors.New(errPolicyDoesNotExistWithACP) + + ErrResourceDoesNotExistOnTargetPolicy = errors.New(errResourceDoesNotExistOnTargetPolicy) + + ErrPolicyDataMustNotBeEmpty = errors.New("policy data can not be empty") + ErrPolicyCreatorMustNotBeEmpty = errors.New("policy creator can not be empty") + ErrObjectDidNotRegister = errors.New(errObjectDidNotRegister) + ErrNoPolicyArgs = errors.New(errNoPolicyArgs) + ErrPolicyIDMustNotBeEmpty = errors.New(errPolicyIDMustNotBeEmpty) + ErrResourceNameMustNotBeEmpty = errors.New(errResourceNameMustNotBeEmpty) +) + +func NewErrInitializationOfACPFailed( + inner error, + Type string, + path string, +) error { + return errors.Wrap( + errInitializationOfACPFailed, + inner, + errors.NewKV("Type", Type), + errors.NewKV("Path", path), + ) +} + +func NewErrFailedToAddPolicyWithACP( + inner error, + Type string, + creatorID string, +) error { + return errors.Wrap( + errFailedToAddPolicyWithACP, + inner, + errors.NewKV("Type", Type), + errors.NewKV("CreatorID", creatorID), + ) +} + +func NewErrFailedToRegisterDocWithACP( + inner error, + Type string, + policyID string, + creatorID string, + resourceName string, + docID string, +) error { + return errors.Wrap( + errFailedToRegisterDocWithACP, + inner, + errors.NewKV("Type", Type), + errors.NewKV("PolicyID", policyID), + errors.NewKV("CreatorID", creatorID), + errors.NewKV("ResourceName", resourceName), + errors.NewKV("DocID", docID), + ) +} + +func NewErrFailedToCheckIfDocIsRegisteredWithACP( + inner error, + Type string, + policyID string, + resourceName string, + docID string, +) error { + return errors.Wrap( + errFailedToCheckIfDocIsRegisteredWithACP, + inner, + errors.NewKV("Type", Type), + 
errors.NewKV("PolicyID", policyID), + errors.NewKV("ResourceName", resourceName), + errors.NewKV("DocID", docID), + ) +} + +func NewErrFailedToVerifyDocAccessWithACP( + inner error, + Type string, + policyID string, + actorID string, + resourceName string, + docID string, +) error { + return errors.Wrap( + errFailedToVerifyDocAccessWithACP, + inner, + errors.NewKV("Type", Type), + errors.NewKV("PolicyID", policyID), + errors.NewKV("ActorID", actorID), + errors.NewKV("ResourceName", resourceName), + errors.NewKV("DocID", docID), + ) +} + +func newErrPolicyDoesNotExistWithACP( + inner error, + policyID string, +) error { + return errors.Wrap( + errPolicyDoesNotExistWithACP, + inner, + errors.NewKV("PolicyID", policyID), + ) +} + +func newErrPolicyValidationFailedWithACP( + inner error, + policyID string, +) error { + return errors.Wrap( + errPolicyValidationFailedWithACP, + inner, + errors.NewKV("PolicyID", policyID), + ) +} + +func newErrResourceDoesNotExistOnTargetPolicy( + resourceName string, + policyID string, +) error { + return errors.New( + errResourceDoesNotExistOnTargetPolicy, + errors.NewKV("PolicyID", policyID), + errors.NewKV("ResourceName", resourceName), + ) +} + +func newErrResourceIsMissingRequiredPermission( + resourceName string, + permission string, + policyID string, +) error { + return errors.New( + errResourceIsMissingRequiredPermission, + errors.NewKV("PolicyID", policyID), + errors.NewKV("ResourceName", resourceName), + errors.NewKV("Permission", permission), + ) +} + +func newErrExprOfRequiredPermissionMustStartWithRelation( + permission string, + relation string, +) error { + return errors.New( + errExprOfRequiredPermMustStartWithRelation, + errors.NewKV("Permission", permission), + errors.NewKV("Relation", relation), + ) +} + +func newErrExprOfRequiredPermissionHasInvalidChar( + permission string, + relation string, + char byte, +) error { + return errors.New( + errExprOfRequiredPermHasInvalidChar, + errors.NewKV("Permission", permission), + errors.NewKV("Relation", relation), + errors.NewKV("Character", string(char)), + ) +} diff --git a/acp/identity/identity.go b/acp/identity/identity.go new file mode 100644 index 0000000000..108c183748 --- /dev/null +++ b/acp/identity/identity.go @@ -0,0 +1,41 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +/* +Package identity provides defradb identity. +*/ + +package identity + +import "github.com/sourcenetwork/immutable" + +// Identity is the unique identifier for an actor. +type Identity string + +var ( + // None is an empty identity. + None = immutable.None[Identity]() +) + +// New makes a new identity if the input is not empty otherwise, returns None. +func New(identity string) immutable.Option[Identity] { + // TODO-ACP: There will be more validation once sourcehub gets some utilities. + // Then a validation function would do the validation, will likely do outside this function. + // https://github.com/sourcenetwork/defradb/issues/2358 + if identity == "" { + return None + } + return immutable.Some(Identity(identity)) +} + +// String returns the string representation of the identity. 
+func (i Identity) String() string {
+ return string(i)
+}
diff --git a/cli/acp.go b/cli/acp.go
new file mode 100644
index 0000000000..30705ac908
--- /dev/null
+++ b/cli/acp.go
@@ -0,0 +1,29 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package cli
+
+import (
+ "github.com/spf13/cobra"
+)
+
+func MakeACPCommand() *cobra.Command {
+ var cmd = &cobra.Command{
+ Use: "acp",
+ Short: "Interact with the access control system of a DefraDB node",
+ Long: `Interact with the access control system of a DefraDB node
+
+Learn more about [ACP](/acp/README.md)
+
+ `,
+ }
+
+ return cmd
+}
diff --git a/cli/acp_policy.go b/cli/acp_policy.go
new file mode 100644
index 0000000000..92ae9321f0
--- /dev/null
+++ b/cli/acp_policy.go
@@ -0,0 +1,25 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package cli
+
+import (
+ "github.com/spf13/cobra"
+)
+
+func MakeACPPolicyCommand() *cobra.Command {
+ var cmd = &cobra.Command{
+ Use: "policy",
+ Short: "Interact with the acp policy features of a DefraDB instance",
+ Long: `Interact with the acp policy features of a DefraDB instance`,
+ }
+
+ return cmd
+}
diff --git a/cli/acp_policy_add.go b/cli/acp_policy_add.go
new file mode 100644
index 0000000000..bca5e95abd
--- /dev/null
+++ b/cli/acp_policy_add.go
@@ -0,0 +1,119 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package cli
+
+import (
+ "io"
+ "os"
+
+ "github.com/spf13/cobra"
+)
+
+func MakeACPPolicyAddCommand() *cobra.Command {
+ const fileFlagLong string = "file"
+ const fileFlagShort string = "f"
+
+ var policyFile string
+
+ var cmd = &cobra.Command{
+ Use: "add [-i --identity] [policy]",
+ Short: "Add new policy",
+ Long: `Add new policy
+
+Notes:
+ - Can not add a policy without specifying an identity.
+ - ACP must be available (i.e. ACP can not be disabled).
+ - A non-DPI policy will be accepted (will be registered with the acp system).
+ - But only a valid DPI policyID & resource can be specified on a schema.
+ - DPI validation happens when attempting to add a schema with '@policy'.
+ - Learn more about [ACP & DPI Rules](/acp/README.md) + +Example: add from an argument string: + defradb client acp policy add -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j ' +description: A Valid DefraDB Policy Interface + +actor: + name: actor + +resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor +' + +Example: add from file: + defradb client acp policy add -i cosmos17r39df0hdcrgnmmw4mvu7qgk5nu888c7uvv37y -f policy.yml + +Example: add from file, verbose flags: + defradb client acp policy add --identity cosmos1kpw734v54g0t0d8tcye8ee5jc3gld0tcr2q473 --file policy.yml + +Example: add from stdin: + cat policy.yml | defradb client acp policy add - + +`, + RunE: func(cmd *cobra.Command, args []string) error { + // TODO-ACP: Ensure here (before going through acp system) if the required identity argument + // is valid, if it is valid then keep proceeding further, otherwise return this error: + // `NewErrRequiredFlagInvalid(identityFlagLongRequired, identityFlagShortRequired)` + // Issue: https://github.com/sourcenetwork/defradb/issues/2358 + + // Handle policy argument. + extraArgsProvided := len(args) + var policy string + switch { + case policyFile != "": + data, err := os.ReadFile(policyFile) + if err != nil { + return err + } + policy = string(data) + + case extraArgsProvided > 0 && args[extraArgsProvided-1] == "-": + data, err := io.ReadAll(cmd.InOrStdin()) + if err != nil { + return err + } + policy = string(data) + + case extraArgsProvided > 0: + policy = args[0] + + default: + return ErrPolicyFileArgCanNotBeEmpty + } + + db := mustGetContextDB(cmd) + policyResult, err := db.AddPolicy( + cmd.Context(), + policy, + ) + + if err != nil { + return err + } + + return writeJSON(cmd, policyResult) + }, + } + cmd.Flags().StringVarP(&policyFile, fileFlagLong, fileFlagShort, "", "File to load a policy from") + return cmd +} diff --git a/cli/cli.go b/cli/cli.go index 4cdb8c443b..38209a9f69 100644 --- a/cli/cli.go +++ b/cli/cli.go @@ -16,10 +16,10 @@ package cli import ( "github.com/spf13/cobra" - "github.com/sourcenetwork/defradb/logging" + "github.com/sourcenetwork/corelog" ) -var log = logging.MustNewLogger("cli") +var log = corelog.NewLogger("cli") // NewDefraCommand returns the root command instanciated with its tree of subcommands. 
func NewDefraCommand() *cobra.Command {
@@ -62,6 +62,16 @@ func NewDefraCommand() *cobra.Command {
 schema_migrate,
 )
+ policy := MakeACPPolicyCommand()
+ policy.AddCommand(
+ MakeACPPolicyAddCommand(),
+ )
+
+ acp := MakeACPCommand()
+ acp.AddCommand(
+ policy,
+ )
+
 view := MakeViewCommand()
 view.AddCommand(
 MakeViewAddCommand(),
@@ -95,6 +105,7 @@ func NewDefraCommand() *cobra.Command {
 MakeCollectionUpdateCommand(),
 MakeCollectionCreateCommand(),
 MakeCollectionDescribeCommand(),
+ MakeCollectionPatchCommand(),
 )
 client := MakeClientCommand()
@@ -102,6 +113,7 @@ func NewDefraCommand() *cobra.Command {
 MakeDumpCommand(),
 MakeRequestCommand(),
 schema,
+ acp,
 view,
 index,
 p2p,
diff --git a/cli/client.go b/cli/client.go
index 532712e8f8..06460ca70d 100644
--- a/cli/client.go
+++ b/cli/client.go
@@ -16,6 +16,7 @@ import (
 func MakeClientCommand() *cobra.Command {
 var txID uint64
+ var identity string
 var cmd = &cobra.Command{
 Use: "client",
 Short: "Interact with a DefraDB node",
@@ -28,12 +29,16 @@ Execute queries, add schema types, obtain node info, etc.`,
 if err := setContextConfig(cmd); err != nil {
 return err
 }
+ if err := setContextIdentity(cmd, identity); err != nil {
+ return err
+ }
 if err := setContextTransaction(cmd, txID); err != nil {
 return err
 }
- return setContextStore(cmd)
+ return setContextDB(cmd)
 },
 }
+ cmd.PersistentFlags().StringVarP(&identity, "identity", "i", "", "ACP Identity")
 cmd.PersistentFlags().Uint64Var(&txID, "tx", 0, "Transaction ID")
 return cmd
}
diff --git a/cli/collection.go b/cli/collection.go
index 23ef9194ae..cdf3d41f5a 100644
--- a/cli/collection.go
+++ b/cli/collection.go
@@ -17,11 +17,11 @@ import (
 "github.com/spf13/cobra"
 "github.com/sourcenetwork/defradb/client"
- "github.com/sourcenetwork/defradb/datastore"
)
func MakeCollectionCommand() *cobra.Command {
 var txID uint64
+ var identity string
 var name string
 var schemaRoot string
 var versionID string
@@ -38,10 +38,13 @@ func MakeCollectionCommand() *cobra.Command {
 if err := setContextConfig(cmd); err != nil {
 return err
 }
+ if err := setContextIdentity(cmd, identity); err != nil {
+ return err
+ }
 if err := setContextTransaction(cmd, txID); err != nil {
 return err
 }
- if err := setContextStore(cmd); err != nil {
+ if err := setContextDB(cmd); err != nil {
 return err
 }
 store := mustGetContextStore(cmd)
@@ -71,16 +74,13 @@ func MakeCollectionCommand() *cobra.Command {
 }
 col := cols[0]
- if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok {
- col = col.WithTxn(tx)
- }
-
 ctx := context.WithValue(cmd.Context(), colContextKey, col)
 cmd.SetContext(ctx)
 return nil
 },
 }
 cmd.PersistentFlags().Uint64Var(&txID, "tx", 0, "Transaction ID")
+ cmd.PersistentFlags().StringVarP(&identity, "identity", "i", "", "ACP Identity")
 cmd.PersistentFlags().StringVar(&name, "name", "", "Collection name")
 cmd.PersistentFlags().StringVar(&schemaRoot, "schema", "", "Collection schema Root")
 cmd.PersistentFlags().StringVar(&versionID, "version", "", "Collection version ID")
diff --git a/cli/collection_create.go b/cli/collection_create.go
index efeee61494..df7d8794b5 100644
--- a/cli/collection_create.go
+++ b/cli/collection_create.go
@@ -22,29 +22,27 @@ import (
 func MakeCollectionCreateCommand() *cobra.Command {
 var file string
 var cmd = &cobra.Command{
- Use: "create <document>",
+ Use: "create [-i --identity] <document>",
 Short: "Create a new document.",
 Long: `Create a new document.
-Example: create from string
+Example: create from string:
 defradb client collection create --name User '{ "name": "Bob" }'
-Example: create multiple from string
+Example: create from string, with identity:
+ defradb client collection create -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name User '{ "name": "Bob" }'
+
+Example: create multiple from string:
 defradb client collection create --name User '[{ "name": "Alice" }, { "name": "Bob" }]'
-Example: create from file
+Example: create from file:
 defradb client collection create --name User -f document.json
-Example: create from stdin
+Example: create from stdin:
 cat document.json | defradb client collection create --name User -
 `,
 Args: cobra.RangeArgs(0, 1),
 RunE: func(cmd *cobra.Command, args []string) error {
- col, ok := tryGetContextCollection(cmd)
- if !ok {
- return cmd.Usage()
- }
-
 var docData []byte
 switch {
 case file != "":
@@ -65,15 +63,20 @@ Example: create from stdin
 return ErrNoDocOrFile
 }
+ col, ok := tryGetContextCollection(cmd)
+ if !ok {
+ return cmd.Usage()
+ }
+
 if client.IsJSONArray(docData) {
- docs, err := client.NewDocsFromJSON(docData, col.Schema())
+ docs, err := client.NewDocsFromJSON(docData, col.Definition())
 if err != nil {
 return err
 }
 return col.CreateMany(cmd.Context(), docs)
 }
- doc, err := client.NewDocFromJSON(docData, col.Schema())
+ doc, err := client.NewDocFromJSON(docData, col.Definition())
 if err != nil {
 return err
 }
diff --git a/cli/collection_delete.go b/cli/collection_delete.go
index d1f945d9ae..a9776d1985 100644
--- a/cli/collection_delete.go
+++ b/cli/collection_delete.go
@@ -17,17 +17,20 @@ import (
)
func MakeCollectionDeleteCommand() *cobra.Command {
- var argDocIDs []string
+ var argDocID string
 var filter string
 var cmd = &cobra.Command{
- Use: "delete [--filter <filter> --docID <docID>]",
+ Use: "delete [-i --identity] [--filter <filter> --docID <docID>]",
 Short: "Delete documents by docID or filter.",
 Long: `Delete documents by docID or filter and list the number of documents deleted.
-Example: delete by docID(s)
- defradb client collection delete --name User --docID bae-123,bae-456
+Example: delete by docID:
+ defradb client collection delete --name User --docID bae-123
-Example: delete by filter
+Example: delete by docID with identity:
+ defradb client collection delete -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name User --docID bae-123
+
+Example: delete by filter:
 defradb client collection delete --name User --filter '{ "_gte": { "points": 100 } }'
 `,
 RunE: func(cmd *cobra.Command, args []string) error {
@@ -37,30 +40,13 @@ Example: delete by filter
 }
 switch {
- case len(argDocIDs) == 1:
- docID, err := client.NewDocIDFromString(argDocIDs[0])
- if err != nil {
- return err
- }
- res, err := col.DeleteWithDocID(cmd.Context(), docID)
- if err != nil {
- return err
- }
- return writeJSON(cmd, res)
- case len(argDocIDs) > 1:
- docIDs := make([]client.DocID, len(argDocIDs))
- for i, v := range argDocIDs {
- docID, err := client.NewDocIDFromString(v)
- if err != nil {
- return err
- }
- docIDs[i] = docID
- }
- res, err := col.DeleteWithDocIDs(cmd.Context(), docIDs)
+ case argDocID != "":
+ docID, err := client.NewDocIDFromString(argDocID)
 if err != nil {
 return err
 }
- return writeJSON(cmd, res)
+ _, err = col.Delete(cmd.Context(), docID)
+ return err
 case filter != "":
 res, err := col.DeleteWithFilter(cmd.Context(), filter)
 if err != nil {
@@ -72,7 +58,7 @@ Example: delete by filter
 }
 },
 }
- cmd.Flags().StringSliceVar(&argDocIDs, "docID", nil, "Document ID")
+ cmd.Flags().StringVar(&argDocID, "docID", "", "Document ID")
 cmd.Flags().StringVar(&filter, "filter", "", "Document filter")
 return cmd
}
diff --git a/cli/collection_get.go b/cli/collection_get.go
index 55c84d6289..9ad5566f62 100644
--- a/cli/collection_get.go
+++ b/cli/collection_get.go
@@ -19,12 +19,15 @@ import (
 func MakeCollectionGetCommand() *cobra.Command {
 var showDeleted bool
 var cmd = &cobra.Command{
- Use: "get [--show-deleted] <docID>",
+ Use: "get [-i --identity] [--show-deleted] <docID>",
 Short: "View document fields.",
 Long: `View document fields.
Example:
 defradb client collection get --name User bae-123
+
+Example: to get a private document, we must use an identity:
+ defradb client collection get -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name User bae-123
 `,
 Args: cobra.ExactArgs(1),
 RunE: func(cmd *cobra.Command, args []string) error {
diff --git a/cli/collection_list_doc_ids.go b/cli/collection_list_doc_ids.go
index 7112a88817..168bb74a5a 100644
--- a/cli/collection_list_doc_ids.go
+++ b/cli/collection_list_doc_ids.go
@@ -18,12 +18,15 @@ import (
 func MakeCollectionListDocIDsCommand() *cobra.Command {
 var cmd = &cobra.Command{
- Use: "docIDs",
+ Use: "docIDs [-i --identity]",
 Short: "List all document IDs (docIDs).",
 Long: `List all document IDs (docIDs).
-Example:
+Example: list all docID(s):
 defradb client collection docIDs --name User
+
+Example: list all docID(s), with an identity:
+ defradb client collection docIDs -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name User
 `,
 RunE: func(cmd *cobra.Command, args []string) error {
 col, ok := tryGetContextCollection(cmd)
diff --git a/cli/collection_patch.go b/cli/collection_patch.go
new file mode 100644
index 0000000000..49d5a91305
--- /dev/null
+++ b/cli/collection_patch.go
@@ -0,0 +1,69 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package cli
+
+import (
+ "fmt"
+ "io"
+ "os"
+
+ "github.com/spf13/cobra"
+)
+
+func MakeCollectionPatchCommand() *cobra.Command {
+ var patchFile string
+ var cmd = &cobra.Command{
+ Use: "patch [patch]",
+ Short: "Patch existing collection descriptions",
+ Long: `Patch existing collection descriptions.
+
+Uses JSON Patch to modify collection descriptions.
+
+Example: patch from an argument string:
+ defradb client collection patch '[{ "op": "add", "path": "...", "value": {...} }]'
+
+Example: patch from file:
+ defradb client collection patch -p patch.json
+
+Example: patch from stdin:
+ cat patch.json | defradb client collection patch -
+
+To learn more about the DefraDB GraphQL Schema Language, refer to https://docs.source.network.`,
+ Args: cobra.RangeArgs(0, 1),
+ RunE: func(cmd *cobra.Command, args []string) error {
+ store := mustGetContextStore(cmd)
+
+ var patch string
+ switch {
+ case patchFile != "":
+ data, err := os.ReadFile(patchFile)
+ if err != nil {
+ return err
+ }
+ patch = string(data)
+ case len(args) > 0 && args[0] == "-":
+ data, err := io.ReadAll(cmd.InOrStdin())
+ if err != nil {
+ return err
+ }
+ patch = string(data)
+ case len(args) == 1:
+ patch = args[0]
+ default:
+ return fmt.Errorf("patch cannot be empty")
+ }
+
+ return store.PatchCollection(cmd.Context(), patch)
+ },
+ }
+ cmd.Flags().StringVarP(&patchFile, "patch-file", "p", "", "File to load a patch from")
+ return cmd
+}
diff --git a/cli/collection_update.go b/cli/collection_update.go
index 42354948a9..3e676edce9 100644
--- a/cli/collection_update.go
+++ b/cli/collection_update.go
@@ -17,24 +17,28 @@ import (
)
func MakeCollectionUpdateCommand() *cobra.Command {
- var argDocIDs []string
+ var argDocID string
 var filter string
 var updater string
 var cmd = &cobra.Command{
- Use: "update [--filter <filter> --docID <docID> --updater <updater>] <updater>",
+ Use: "update [-i --identity] [--filter <filter> --docID <docID> --updater <updater>] <updater>",
 Short: "Update documents by docID or filter.",
 Long: `Update documents by docID or filter.
-Example: update from string
+Example: update from string:
 defradb client collection update --name User --docID bae-123 '{ "name": "Bob" }'
-Example: update by filter
+Example: update by filter:
 defradb client collection update --name User \
 --filter '{ "_gte": { "points": 100 } }' --updater '{ "verified": true }'
-Example: update by docIDs
+Example: update by docID:
 defradb client collection update --name User \
- --docID bae-123,bae-456 --updater '{ "verified": true }'
+ --docID bae-123 --updater '{ "verified": true }'
+
+Example: update a private document, with identity:
+ defradb client collection update -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name User \
+ --docID bae-123 --updater '{ "verified": true }'
 `,
 Args: cobra.RangeArgs(0, 1),
 RunE: func(cmd *cobra.Command, args []string) error {
@@ -44,38 +48,14 @@ Example: update by docIDs
 }
 switch {
- case len(argDocIDs) == 1 && updater != "":
- docID, err := client.NewDocIDFromString(argDocIDs[0])
- if err != nil {
- return err
- }
- res, err := col.UpdateWithDocID(cmd.Context(), docID, updater)
- if err != nil {
- return err
- }
- return writeJSON(cmd, res)
- case len(argDocIDs) > 1 && updater != "":
- docIDs := make([]client.DocID, len(argDocIDs))
- for i, v := range argDocIDs {
- docID, err := client.NewDocIDFromString(v)
- if err != nil {
- return err
- }
- docIDs[i] = docID
- }
- res, err := col.UpdateWithDocIDs(cmd.Context(), docIDs, updater)
- if err != nil {
- return err
- }
- return writeJSON(cmd, res)
 case filter != "" && updater != "":
 res, err := col.UpdateWithFilter(cmd.Context(), filter, updater)
 if err != nil {
 return err
 }
 return writeJSON(cmd, res)
- case len(argDocIDs) == 1 && len(args) == 1:
- docID, err := client.NewDocIDFromString(argDocIDs[0])
+ case argDocID != "" && len(args) == 1:
+ docID, err := client.NewDocIDFromString(argDocID)
 if err != nil {
 return err
 }
@@ -92,7 +72,7 @@ Example: update by docIDs
 }
 },
 }
- cmd.Flags().StringSliceVar(&argDocIDs, "docID", nil, "Document ID")
+ cmd.Flags().StringVar(&argDocID, "docID", "", "Document ID")
 cmd.Flags().StringVar(&filter, "filter", "", "Document filter")
 cmd.Flags().StringVar(&updater, "updater", "", "Document updater")
 return cmd
diff --git a/cli/config.go b/cli/config.go
index bb57a8cb3d..fd275a2d01 100644
--- a/cli/config.go
+++ b/cli/config.go
@@ -15,10 +15,9 @@ import (
 "path/filepath"
 "strings"
+ "github.com/sourcenetwork/corelog"
 "github.com/spf13/pflag"
 "github.com/spf13/viper"
-
- "github.com/sourcenetwork/defradb/logging"
)
const (
@@ -41,11 +40,13 @@ var configPaths = []string{
// configFlags is a mapping of config keys to cli flags to bind to.
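// For example, the "log.level" entry below binds the config key to the
// renamed --log-level flag, so `defradb start --log-level debug` and a
// config file containing `log: { level: debug }` set the same value.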
var configFlags = map[string]string{ - "log.level": "loglevel", - "log.output": "logoutput", - "log.format": "logformat", - "log.stacktrace": "logtrace", - "log.nocolor": "lognocolor", + "log.level": "log-level", + "log.output": "log-output", + "log.format": "log-format", + "log.stacktrace": "log-stacktrace", + "log.source": "log-source", + "log.overrides": "log-overrides", + "log.nocolor": "log-no-color", "api.address": "url", "datastore.maxtxnretries": "max-txn-retries", "datastore.store": "store", @@ -125,14 +126,17 @@ func loadConfig(rootdir string, flags *pflag.FlagSet) (*viper.Viper, error) { } } - logCfg := loggingConfig(cfg.Sub("log")) - logCfg.OverridesByLoggerName = make(map[string]logging.Config) + // set default logging config + corelog.SetConfig(corelog.Config{ + Level: cfg.GetString("log.level"), + Format: cfg.GetString("log.format"), + Output: cfg.GetString("log.output"), + EnableStackTrace: cfg.GetBool("log.stacktrace"), + EnableSource: cfg.GetBool("log.source"), + }) - // apply named logging overrides - for key := range cfg.GetStringMap("log.overrides") { - logCfg.OverridesByLoggerName[key] = loggingConfig(cfg.Sub("log.overrides." + key)) - } - logging.SetConfig(logCfg) + // set logging config overrides + corelog.SetConfigOverrides(cfg.GetString("log.overrides")) return cfg, nil } @@ -147,39 +151,3 @@ func bindConfigFlags(cfg *viper.Viper, flags *pflag.FlagSet) error { } return nil } - -// loggingConfig returns a new logging config from the given config. -func loggingConfig(cfg *viper.Viper) logging.Config { - var level int8 - switch value := cfg.GetString("level"); value { - case configLogLevelDebug: - level = logging.Debug - case configLogLevelInfo: - level = logging.Info - case configLogLevelError: - level = logging.Error - case configLogLevelFatal: - level = logging.Fatal - default: - level = logging.Info - } - - var format logging.EncoderFormat - switch value := cfg.GetString("format"); value { - case configLogFormatJSON: - format = logging.JSON - case configLogFormatCSV: - format = logging.CSV - default: - format = logging.CSV - } - - return logging.Config{ - Level: logging.NewLogLevelOption(level), - EnableStackTrace: logging.NewEnableStackTraceOption(cfg.GetBool("stacktrace")), - DisableColor: logging.NewDisableColorOption(cfg.GetBool("nocolor")), - EncoderFormat: logging.NewEncoderFormatOption(format), - OutputPaths: []string{cfg.GetString("output")}, - EnableCaller: logging.NewEnableCallerOption(cfg.GetBool("caller")), - } -} diff --git a/cli/config_test.go b/cli/config_test.go index 210743477c..39a17d60fd 100644 --- a/cli/config_test.go +++ b/cli/config_test.go @@ -53,9 +53,10 @@ func TestLoadConfigNotExist(t *testing.T) { assert.Equal(t, []string{}, cfg.GetStringSlice("net.peers")) assert.Equal(t, "info", cfg.GetString("log.level")) - assert.Equal(t, false, cfg.GetBool("log.stacktrace")) - assert.Equal(t, "csv", cfg.GetString("log.format")) assert.Equal(t, "stderr", cfg.GetString("log.output")) + assert.Equal(t, "text", cfg.GetString("log.format")) + assert.Equal(t, false, cfg.GetBool("log.stacktrace")) + assert.Equal(t, false, cfg.GetBool("log.source")) + assert.Equal(t, "", cfg.GetString("log.overrides")) assert.Equal(t, false, cfg.GetBool("log.nocolor")) - assert.Equal(t, false, cfg.GetBool("log.caller")) } diff --git a/cli/dump.go b/cli/dump.go index a3d155605b..76b36bab99 100644 --- a/cli/dump.go +++ b/cli/dump.go @@ -12,8 +12,6 @@ package cli import ( "github.com/spf13/cobra" - - "github.com/sourcenetwork/defradb/client" ) func MakeDumpCommand() 
*cobra.Command { @@ -21,7 +19,7 @@ func MakeDumpCommand() *cobra.Command { Use: "dump", Short: "Dump the contents of DefraDB node-side", RunE: func(cmd *cobra.Command, _ []string) (err error) { - db := cmd.Context().Value(dbContextKey).(client.DB) + db := mustGetContextDB(cmd) return db.PrintDump(cmd.Context()) }, } diff --git a/cli/errors.go b/cli/errors.go index bb124bc7f9..02cd252b59 100644 --- a/cli/errors.go +++ b/cli/errors.go @@ -11,25 +11,37 @@ package cli import ( + "fmt" + "github.com/sourcenetwork/defradb/errors" ) const ( errInvalidLensConfig string = "invalid lens configuration" errSchemaVersionNotOfSchema string = "the given schema version is from a different schema" + errRequiredFlag string = "the required flag [--%s|-%s] is %s" ) var ( - ErrNoDocOrFile = errors.New("document or file must be defined") - ErrInvalidDocument = errors.New("invalid document") - ErrNoDocIDOrFilter = errors.New("docID or filter must be defined") - ErrInvalidExportFormat = errors.New("invalid export format") - ErrNoLensConfig = errors.New("lens config cannot be empty") - ErrInvalidLensConfig = errors.New("invalid lens configuration") - ErrSchemaVersionNotOfSchema = errors.New(errSchemaVersionNotOfSchema) - ErrViewAddMissingArgs = errors.New("please provide a base query and output SDL for this view") + ErrNoDocOrFile = errors.New("document or file must be defined") + ErrInvalidDocument = errors.New("invalid document") + ErrNoDocIDOrFilter = errors.New("docID or filter must be defined") + ErrInvalidExportFormat = errors.New("invalid export format") + ErrNoLensConfig = errors.New("lens config cannot be empty") + ErrInvalidLensConfig = errors.New("invalid lens configuration") + ErrSchemaVersionNotOfSchema = errors.New(errSchemaVersionNotOfSchema) + ErrViewAddMissingArgs = errors.New("please provide a base query and output SDL for this view") + ErrPolicyFileArgCanNotBeEmpty = errors.New("policy file argument can not be empty") ) +func NewErrRequiredFlagEmpty(longName string, shortName string) error { + return errors.New(fmt.Sprintf(errRequiredFlag, longName, shortName, "empty")) +} + +func NewErrRequiredFlagInvalid(longName string, shortName string) error { + return errors.New(fmt.Sprintf(errRequiredFlag, longName, shortName, "invalid")) +} + func NewErrInvalidLensConfig(inner error) error { return errors.Wrap(errInvalidLensConfig, inner) } diff --git a/cli/index_create.go b/cli/index_create.go index bfe5ec64c2..0d724da15b 100644 --- a/cli/index_create.go +++ b/cli/index_create.go @@ -14,7 +14,6 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/datastore" ) func MakeIndexCreateCommand() *cobra.Command { @@ -52,9 +51,6 @@ Example: create a named index for 'Users' collection on 'name' field: if err != nil { return err } - if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { - col = col.WithTxn(tx) - } desc, err = col.CreateIndex(cmd.Context(), desc) if err != nil { return err diff --git a/cli/index_drop.go b/cli/index_drop.go index 96f007268d..5dd069b5da 100644 --- a/cli/index_drop.go +++ b/cli/index_drop.go @@ -12,8 +12,6 @@ package cli import ( "github.com/spf13/cobra" - - "github.com/sourcenetwork/defradb/datastore" ) func MakeIndexDropCommand() *cobra.Command { @@ -34,9 +32,6 @@ Example: drop the index 'UsersByName' for 'Users' collection: if err != nil { return err } - if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { - col = col.WithTxn(tx) - } return col.DropIndex(cmd.Context(), nameArg) }, } diff 
--git a/cli/index_list.go b/cli/index_list.go
index bf1fd21251..481acb7d37 100644
--- a/cli/index_list.go
+++ b/cli/index_list.go
@@ -12,8 +12,6 @@ package cli
 import (
 "github.com/spf13/cobra"
-
- "github.com/sourcenetwork/defradb/datastore"
)
func MakeIndexListCommand() *cobra.Command {
@@ -38,9 +36,6 @@ Example: show all index for 'Users' collection:
 if err != nil {
 return err
 }
- if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok {
- col = col.WithTxn(tx)
- }
 indexes, err := col.GetIndexes(cmd.Context())
 if err != nil {
 return err
diff --git a/cli/request.go b/cli/request.go
index d5e37e79a3..3dba0c197d 100644
--- a/cli/request.go
+++ b/cli/request.go
@@ -27,7 +27,7 @@ const (
 func MakeRequestCommand() *cobra.Command {
 var filePath string
 var cmd = &cobra.Command{
- Use: "query [query request]",
+ Use: "query [-i --identity] [request]",
 Short: "Send a DefraDB GraphQL query request",
 Long: `Send a DefraDB GraphQL query request to the database.
@@ -37,6 +37,9 @@ A query request can be sent as a single argument. Example command:
 Do a query request from a file by using the '-f' flag. Example command:
 defradb client query -f request.graphql
+Do a query request from a file and with an identity. Example command:
+ defradb client query -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j -f request.graphql
+
 Or it can be sent via stdin by using the '-' special syntax. Example command:
 cat request.graphql | defradb client query -
@@ -45,8 +48,6 @@ with the database more conveniently.
To learn more about the DefraDB GraphQL Query Language, refer to https://docs.source.network.`,
 RunE: func(cmd *cobra.Command, args []string) error {
- store := mustGetContextStore(cmd)
-
 var request string
 switch {
 case filePath != "":
@@ -68,6 +69,8 @@ To learn more about the DefraDB GraphQL Query Language, refer to https://docs.so
 if request == "" {
 return errors.New("request cannot be empty")
 }
+
+ store := mustGetContextStore(cmd)
 result := store.ExecRequest(cmd.Context(), request)
 var errors []string
diff --git a/cli/root.go b/cli/root.go
index e4ba349f76..8fc8baf628 100644
--- a/cli/root.go
+++ b/cli/root.go
@@ -38,31 +38,43 @@ Start a DefraDB node, interact with a local or remote node, and much more.
)
 cmd.PersistentFlags().String(
- "loglevel",
+ "log-level",
 "info",
 "Log level to use. Options are debug, info, error, fatal",
)
 cmd.PersistentFlags().String(
- "logoutput",
+ "log-output",
 "stderr",
- "Log output path",
+ "Log output path. Options are stderr or stdout.",
)
 cmd.PersistentFlags().String(
- "logformat",
+ "log-format",
- "csv",
+ "text",
- "Log format to use. Options are csv, json",
+ "Log format to use. Options are text or json",
)
 cmd.PersistentFlags().Bool(
- "logtrace",
+ "log-stacktrace",
 false,
 "Include stacktrace in error and fatal logs",
)
 cmd.PersistentFlags().Bool(
- "lognocolor",
+ "log-source",
+ false,
+ "Include source location in logs",
+ )
+
+ cmd.PersistentFlags().String(
+ "log-overrides",
+ "",
+ "Logger config overrides. Format <name>,<key>=<val>,...;<name>,...",
+ )
+
+ cmd.PersistentFlags().Bool(
+ "log-no-color",
 false,
 "Disable colored log output",
)
diff --git a/cli/schema_add.go b/cli/schema_add.go
index f987d062df..e81896322d 100644
--- a/cli/schema_add.go
+++ b/cli/schema_add.go
@@ -25,6 +25,11 @@ func MakeSchemaAddCommand() *cobra.Command {
 Short: "Add new schema",
 Long: `Add new schema.
+Schema Object with a '@policy(id:".." resource: "..")' linked will only be accepted if:
+ - ACP is available (i.e. ACP is not disabled).
+ - The specified resource adheres to the Document Access Control DPI Rules. + - Learn more about [ACP & DPI Rules](/acp/README.md) + Example: add from an argument string: defradb client schema add 'type Foo { ... }' diff --git a/cli/schema_migration_down.go b/cli/schema_migration_down.go index 1d7622257c..a49f359694 100644 --- a/cli/schema_migration_down.go +++ b/cli/schema_migration_down.go @@ -17,8 +17,6 @@ import ( "github.com/sourcenetwork/immutable/enumerable" "github.com/spf13/cobra" - - "github.com/sourcenetwork/defradb/datastore" ) func MakeSchemaMigrationDownCommand() *cobra.Command { @@ -67,12 +65,7 @@ Example: migrate from stdin if err := json.Unmarshal(srcData, &src); err != nil { return err } - lens := store.LensRegistry() - if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { - lens = lens.WithTxn(tx) - } - - out, err := lens.MigrateDown(cmd.Context(), enumerable.New(src), collectionID) + out, err := store.LensRegistry().MigrateDown(cmd.Context(), enumerable.New(src), collectionID) if err != nil { return err } diff --git a/cli/schema_migration_reload.go b/cli/schema_migration_reload.go index 4266b3ec3f..8ffb5542f1 100644 --- a/cli/schema_migration_reload.go +++ b/cli/schema_migration_reload.go @@ -12,8 +12,6 @@ package cli import ( "github.com/spf13/cobra" - - "github.com/sourcenetwork/defradb/datastore" ) func MakeSchemaMigrationReloadCommand() *cobra.Command { @@ -23,12 +21,7 @@ func MakeSchemaMigrationReloadCommand() *cobra.Command { Long: `Reload the schema migrations within DefraDB`, RunE: func(cmd *cobra.Command, args []string) error { store := mustGetContextStore(cmd) - - lens := store.LensRegistry() - if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { - lens = lens.WithTxn(tx) - } - return lens.ReloadLenses(cmd.Context()) + return store.LensRegistry().ReloadLenses(cmd.Context()) }, } return cmd diff --git a/cli/schema_migration_up.go b/cli/schema_migration_up.go index 577b87d4c7..4473c45911 100644 --- a/cli/schema_migration_up.go +++ b/cli/schema_migration_up.go @@ -17,8 +17,6 @@ import ( "github.com/sourcenetwork/immutable/enumerable" "github.com/spf13/cobra" - - "github.com/sourcenetwork/defradb/datastore" ) func MakeSchemaMigrationUpCommand() *cobra.Command { @@ -67,12 +65,7 @@ Example: migrate from stdin if err := json.Unmarshal(srcData, &src); err != nil { return err } - lens := store.LensRegistry() - if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { - lens = lens.WithTxn(tx) - } - - out, err := lens.MigrateUp(cmd.Context(), enumerable.New(src), collectionID) + out, err := store.LensRegistry().MigrateUp(cmd.Context(), enumerable.New(src), collectionID) if err != nil { return err } diff --git a/cli/schema_patch.go b/cli/schema_patch.go index 23f425396d..cf9224d204 100644 --- a/cli/schema_patch.go +++ b/cli/schema_patch.go @@ -37,7 +37,7 @@ Example: patch from an argument string: defradb client schema patch '[{ "op": "add", "path": "...", "value": {...} }]' '{"lenses": [...' 
Example: patch from file:
- defradb client schema patch -f patch.json
+ defradb client schema patch -p patch.json
Example: patch from stdin:
 cat patch.json | defradb client schema patch -
diff --git a/cli/server_dump.go b/cli/server_dump.go
index eb364a247f..767b86f364 100644
--- a/cli/server_dump.go
+++ b/cli/server_dump.go
@@ -24,7 +24,7 @@ func MakeServerDumpCmd() *cobra.Command {
 Short: "Dumps the state of the entire database",
 RunE: func(cmd *cobra.Command, _ []string) error {
 cfg := mustGetContextConfig(cmd)
- log.FeedbackInfo(cmd.Context(), "Dumping DB state...")
+ log.InfoContext(cmd.Context(), "Dumping DB state...")
 if cfg.GetString("datastore.store") != configStoreBadger {
 return errors.New("server-side dump is only supported for the Badger datastore")
diff --git a/cli/start.go b/cli/start.go
index d4e789cbc6..ca9267e7e9 100644
--- a/cli/start.go
+++ b/cli/start.go
@@ -50,6 +50,10 @@ func MakeStartCommand() *cobra.Command {
 dbOpts := []db.Option{
 db.WithUpdateEvents(),
 db.WithMaxRetries(cfg.GetInt("datastore.MaxTxnRetries")),
+ // TODO-ACP: In future, when admin signatures are in and we add support for the --no-acp flag,
+ // we can allow starting the db without acp. Currently that can only be done programmatically.
+ // https://github.com/sourcenetwork/defradb/issues/2271
+ db.WithACPInMemory(),
 }
 netOpts := []net.NodeOpt{
@@ -84,12 +88,17 @@ func MakeStartCommand() *cobra.Command {
 // Running with memory store mode will always generate a random key.
 // Adding support for an ephemeral mode and moving the key to the
 // config would solve both of these issues.
- rootdir := mustGetContextRootDir(cmd)
- key, err := loadOrGeneratePrivateKey(filepath.Join(rootdir, "data", "key"))
+ rootDir := mustGetContextRootDir(cmd)
+ key, err := loadOrGeneratePrivateKey(filepath.Join(rootDir, "data", "key"))
 if err != nil {
 return err
 }
 netOpts = append(netOpts, net.WithPrivateKey(key))
+
+ // TODO-ACP: In future, when admin signatures are in and we add support for the --no-acp flag,
+ // we can allow starting the db without acp. Currently that can only be done programmatically.
+ // https://github.com/sourcenetwork/defradb/issues/2271 + dbOpts = append(dbOpts, db.WithACP(rootDir)) } opts := []node.NodeOpt{ @@ -108,11 +117,11 @@ func MakeStartCommand() *cobra.Command { defer func() { if err := n.Close(cmd.Context()); err != nil { - log.FeedbackErrorE(cmd.Context(), "Stopping DefraDB", err) + log.ErrorContextE(cmd.Context(), "Stopping DefraDB", err) } }() - log.FeedbackInfo(cmd.Context(), "Starting DefraDB") + log.InfoContext(cmd.Context(), "Starting DefraDB") if err := n.Start(cmd.Context()); err != nil { return err } @@ -122,9 +131,9 @@ func MakeStartCommand() *cobra.Command { select { case <-cmd.Context().Done(): - log.FeedbackInfo(cmd.Context(), "Received context cancellation; shutting down...") + log.InfoContext(cmd.Context(), "Received context cancellation; shutting down...") case <-signalCh: - log.FeedbackInfo(cmd.Context(), "Received interrupt; shutting down...") + log.InfoContext(cmd.Context(), "Received interrupt; shutting down...") } return nil diff --git a/cli/tx_create.go b/cli/tx_create.go index da239b6943..5190ba20f7 100644 --- a/cli/tx_create.go +++ b/cli/tx_create.go @@ -13,7 +13,6 @@ package cli import ( "github.com/spf13/cobra" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" ) @@ -25,7 +24,7 @@ func MakeTxCreateCommand() *cobra.Command { Short: "Create a new DefraDB transaction.", Long: `Create a new DefraDB transaction.`, RunE: func(cmd *cobra.Command, args []string) (err error) { - db := cmd.Context().Value(dbContextKey).(client.DB) + db := mustGetContextDB(cmd) var tx datastore.Txn if concurrent { diff --git a/cli/utils.go b/cli/utils.go index caeb282606..25af57528b 100644 --- a/cli/utils.go +++ b/cli/utils.go @@ -20,8 +20,9 @@ import ( "github.com/spf13/cobra" "github.com/spf13/viper" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/datastore" + "github.com/sourcenetwork/defradb/db" "github.com/sourcenetwork/defradb/http" ) @@ -32,17 +33,8 @@ var ( cfgContextKey = contextKey("cfg") // rootDirContextKey is the context key for the root directory. rootDirContextKey = contextKey("rootDir") - // txContextKey is the context key for the datastore.Txn - // - // This will only be set if a transaction id is specified. - txContextKey = contextKey("tx") // dbContextKey is the context key for the client.DB dbContextKey = contextKey("db") - // storeContextKey is the context key for the client.Store - // - // If a transaction exists, all operations will be executed - // in the current transaction context. - storeContextKey = contextKey("store") // colContextKey is the context key for the client.Collection // // If a transaction exists, all operations will be executed @@ -50,11 +42,18 @@ var ( colContextKey = contextKey("col") ) +// mustGetContextDB returns the db for the current command context. +// +// If a db is not set in the current context this function panics. +func mustGetContextDB(cmd *cobra.Command) client.DB { + return cmd.Context().Value(dbContextKey).(client.DB) +} + // mustGetContextStore returns the store for the current command context. // // If a store is not set in the current context this function panics. func mustGetContextStore(cmd *cobra.Command) client.Store { - return cmd.Context().Value(storeContextKey).(client.Store) + return cmd.Context().Value(dbContextKey).(client.Store) } // mustGetContextP2P returns the p2p implementation for the current command context. 
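For embedded (non-CLI) use, the same choice between the two ACP modes wired into cli/start.go above can be expressed as a small helper; a minimal sketch, assuming only the db options shown in this patch (the acpOptions wrapper itself is hypothetical):

    package main

    import "github.com/sourcenetwork/defradb/db"

    // acpOptions mirrors the branch in cli/start.go: persistent ACP keeps its
    // state under the node's root directory, while in-memory ACP lives only
    // for the lifetime of the process (as used with the memory store).
    func acpOptions(persistent bool, rootDir string) []db.Option {
    	opts := []db.Option{db.WithUpdateEvents()}
    	if persistent {
    		opts = append(opts, db.WithACP(rootDir))
    	} else {
    		opts = append(opts, db.WithACPInMemory())
    	}
    	return opts
    }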
@@ -85,6 +84,18 @@ func tryGetContextCollection(cmd *cobra.Command) (client.Collection, bool) {
 return col, ok
}
+// setContextDB sets the db for the current command context.
+func setContextDB(cmd *cobra.Command) error {
+ cfg := mustGetContextConfig(cmd)
+ db, err := http.NewClient(cfg.GetString("api.address"))
+ if err != nil {
+ return err
+ }
+ ctx := context.WithValue(cmd.Context(), dbContextKey, db)
+ cmd.SetContext(ctx)
+ return nil
+}
+
// setContextConfig sets the config for the current command context.
func setContextConfig(cmd *cobra.Command) error {
 rootdir := mustGetContextRootDir(cmd)
@@ -108,24 +119,18 @@ func setContextTransaction(cmd *cobra.Command, txId uint64) error {
 if err != nil {
 return err
 }
- ctx := context.WithValue(cmd.Context(), txContextKey, tx)
+ ctx := db.SetContextTxn(cmd.Context(), tx)
 cmd.SetContext(ctx)
 return nil
}
-// setContextStore sets the store for the current command context.
-func setContextStore(cmd *cobra.Command) error {
- cfg := mustGetContextConfig(cmd)
- db, err := http.NewClient(cfg.GetString("api.address"))
- if err != nil {
- return err
- }
- ctx := context.WithValue(cmd.Context(), dbContextKey, db)
- if tx, ok := ctx.Value(txContextKey).(datastore.Txn); ok {
- ctx = context.WithValue(ctx, storeContextKey, db.WithTxn(tx))
- } else {
- ctx = context.WithValue(ctx, storeContextKey, db)
+// setContextIdentity sets the identity for the current command context.
+func setContextIdentity(cmd *cobra.Command, identity string) error {
+ // TODO-ACP: `https://github.com/sourcenetwork/defradb/issues/2358` do the validation here.
+ if identity == "" {
+ return nil
 }
+ ctx := db.SetContextIdentity(cmd.Context(), acpIdentity.New(identity))
 cmd.SetContext(ctx)
 return nil
}
diff --git a/client/README.md b/client/README.md
new file mode 100644
index 0000000000..ec2cf7efcd
--- /dev/null
+++ b/client/README.md
@@ -0,0 +1,3 @@
+The `client` package is the primary access point for interacting with an embedded DefraDB instance.
+
+[Data definition overview](./data_definition.md) - How the shape of documents is defined and grouped.
diff --git a/client/collection.go b/client/collection.go
index 58b53c3af0..38c309a0e8 100644
--- a/client/collection.go
+++ b/client/collection.go
@@ -14,8 +14,6 @@ import (
 "context"
 "github.com/sourcenetwork/immutable"
-
- "github.com/sourcenetwork/defradb/datastore"
)
// Collection represents a defradb collection.
@@ -46,12 +44,12 @@ type Collection interface {
 // Create a new document.
 //
 // Will verify the DocID/CID to ensure that the new document is correctly formatted.
- Create(context.Context, *Document) error
+ Create(ctx context.Context, doc *Document) error
 // CreateMany new documents.
 //
 // Will verify the DocIDs/CIDs to ensure that the new documents are correctly formatted.
- CreateMany(context.Context, []*Document) error
+ CreateMany(ctx context.Context, docs []*Document) error
 // Update an existing document with the new values.
 //
 // Any field that is nil/empty and hasn't had Clear called on it will be ignored.
 //
 // Will return an ErrDocumentNotFound error if the given document is not found.
- Update(context.Context, *Document) error
+ Update(ctx context.Context, doc *Document) error
 // Save the given document in the database.
 //
 // If a document exists with the given DocID it will update it. Otherwise a new document
 // will be created.
- Save(context.Context, *Document) error
+ Save(ctx context.Context, doc *Document) error
 // Delete will attempt to delete a document by DocID.
// // Will return true if a deletion is successful, and return false along with an error // if it cannot. If the document doesn't exist, then it will return false and a ErrDocumentNotFound error. - // This operation will hard-delete all state relating to the given DocID. This includes data, block, and head storage. - Delete(context.Context, DocID) (bool, error) + // This operation will hard-delete all state relating to the given DocID. + // This includes data, block, and head storage. + Delete(ctx context.Context, docID DocID) (bool, error) // Exists checks if a given document exists with supplied DocID. // // Will return true if a matching document exists, otherwise will return false. - Exists(context.Context, DocID) (bool, error) - - // UpdateWith updates a target document using the given updater type. - // - // Target can be a Filter statement, a single DocID, a single document, - // an array of DocIDs, or an array of documents. - // It is recommended to use the respective typed versions of Update - // (e.g. UpdateWithFilter or UpdateWithDocID) over this function if you can. - // - // Returns an ErrInvalidUpdateTarget error if the target type is not supported. - // Returns an ErrInvalidUpdater error if the updater type is not supported. - UpdateWith(ctx context.Context, target any, updater string) (*UpdateResult, error) + Exists(ctx context.Context, docID DocID) (bool, error) // UpdateWithFilter updates using a filter to target documents for update. // // The provided updater must be a string Patch, string Merge Patch, a parsed Patch, or parsed Merge Patch // else an ErrInvalidUpdater will be returned. - UpdateWithFilter(ctx context.Context, filter any, updater string) (*UpdateResult, error) - - // UpdateWithDocID updates using a DocID to target a single document for update. - // - // The provided updater must be a string Patch, string Merge Patch, a parsed Patch, or parsed Merge Patch - // else an ErrInvalidUpdater will be returned. - // - // Returns an ErrDocumentNotFound if a document matching the given DocID is not found. - UpdateWithDocID(ctx context.Context, docID DocID, updater string) (*UpdateResult, error) - - // UpdateWithDocIDs updates documents matching the given DocIDs. - // - // The provided updater must be a string Patch, string Merge Patch, a parsed Patch, or parsed Merge Patch - // else an ErrInvalidUpdater will be returned. - // - // Returns an ErrDocumentNotFound if a document is not found for any given DocID. - UpdateWithDocIDs(context.Context, []DocID, string) (*UpdateResult, error) - - // DeleteWith deletes a target document. - // - // Target can be a Filter statement, a single DocID, a single document, an array of DocIDs, - // or an array of documents. It is recommended to use the respective typed versions of Delete - // (e.g. DeleteWithFilter or DeleteWithDocID) over this function if you can. - // This operation will soft-delete documents related to the given DocID and update the composite block - // with a status of `Deleted`. - // - // Returns an ErrInvalidDeleteTarget if the target type is not supported. - DeleteWith(ctx context.Context, target any) (*DeleteResult, error) + UpdateWithFilter( + ctx context.Context, + filter any, + updater string, + ) (*UpdateResult, error) // DeleteWithFilter deletes documents matching the given filter. // // This operation will soft-delete documents related to the given filter and update the composite block // with a status of `Deleted`. 
- DeleteWithFilter(ctx context.Context, filter any) (*DeleteResult, error)
-
- // DeleteWithDocID deletes using a DocID to target a single document for delete.
- //
- // This operation will soft-delete documents related to the given DocID and update the composite block
- // with a status of `Deleted`.
- //
- // Returns an ErrDocumentNotFound if a document matching the given DocID is not found.
- DeleteWithDocID(context.Context, DocID) (*DeleteResult, error)
-
- // DeleteWithDocIDs deletes documents matching the given DocIDs.
- //
- // This operation will soft-delete documents related to the given DocIDs and update the composite block
- // with a status of `Deleted`.
- //
- // Returns an ErrDocumentNotFound if a document is not found for any given DocID.
- DeleteWithDocIDs(context.Context, []DocID) (*DeleteResult, error)
+ DeleteWithFilter(
+ ctx context.Context,
+ filter any,
+ ) (*DeleteResult, error)
 // Get returns the document with the given DocID.
 //
 // Returns an ErrDocumentNotFound if a document matching the given DocID is not found.
- Get(ctx context.Context, docID DocID, showDeleted bool) (*Document, error)
-
- // WithTxn returns a new instance of the collection, with a transaction
- // handle instead of a raw DB handle.
- WithTxn(datastore.Txn) Collection
+ Get(
+ ctx context.Context,
+ docID DocID,
+ showDeleted bool,
+ ) (*Document, error)
 // GetAllDocIDs returns all the document IDs that exist in the collection.
 GetAllDocIDs(ctx context.Context) (<-chan DocIDResult, error)
@@ -162,6 +114,7 @@ type Collection interface {
 // `IndexDescription.Name` must start with a letter or an underscore and can
 // only contain letters, numbers, and underscores.
 // If the name of the index is not provided, it will be generated.
+ // WARNING: This method can not create an index for a collection that has a policy.
 CreateIndex(context.Context, IndexDescription) (IndexDescription, error)
 // DropIndex drops an index from the collection.
diff --git a/client/descriptions.go b/client/collection_description.go
similarity index 50%
rename from client/descriptions.go
rename to client/collection_description.go
index dd12e9cf00..aa22bf7121 100644
--- a/client/descriptions.go
+++ b/client/collection_description.go
@@ -1,4 +1,4 @@
-// Copyright 2022 Democratized Data Foundation
+// Copyright 2024 Democratized Data Foundation
//
// Use of this software is governed by the Business Source License
// included in the file licenses/BSL.txt.
@@ -60,75 +60,25 @@ type CollectionDescription struct {
 // - [CollectionSource]
 Sources []any
- // Fields contains the fields within this Collection.
+ // Fields contains the fields local to the node within this Collection.
+ //
+ // Most fields defined here will also be present on the [SchemaDescription]. A notable
+ // exception is the fields of the (optional) secondary side of a relation,
+ // which are local only and will not be present on the [SchemaDescription].
 Fields []CollectionFieldDescription
 // Indexes contains the secondary indexes that this Collection has.
 Indexes []IndexDescription
-}
-// IDString returns the collection ID as a string.
-func (col CollectionDescription) IDString() string {
- return fmt.Sprint(col.ID)
-}
-
-// GetFieldByName returns the field for the given field name. If such a field is found it
-func (col CollectionDescription) GetFieldByName(fieldName string) (CollectionFieldDescription, bool) {
-	for _, field := range col.Fields {
-		if field.Name == fieldName {
-			return field, true
-		}
-	}
-	return CollectionFieldDescription{}, false
-}
-
-// GetFieldByName returns the field for the given field name. If such a field is found it
-// will return it and true, if it is not found it will return false.
-func (s SchemaDescription) GetFieldByName(fieldName string) (SchemaFieldDescription, bool) {
-	for _, field := range s.Fields {
-		if field.Name == fieldName {
-			return field, true
-		}
-	}
-	return SchemaFieldDescription{}, false
-}
-
-// GetFieldByRelation returns the field that supports the relation of the given name.
-func (col CollectionDescription) GetFieldByRelation(
-	relationName string,
-	otherCollectionName string,
-	otherFieldName string,
-	schema *SchemaDescription,
-) (SchemaFieldDescription, bool) {
-	for _, field := range schema.Fields {
-		if field.RelationName == relationName &&
-			!(col.Name.Value() == otherCollectionName && otherFieldName == field.Name) &&
-			field.Kind != FieldKind_DocID {
-			return field, true
-		}
-	}
-	return SchemaFieldDescription{}, false
-}
-
-// QuerySources returns all the Sources of type [QuerySource]
-func (col CollectionDescription) QuerySources() []*QuerySource {
-	return sourcesOfType[*QuerySource](col)
-}
-
-// CollectionSources returns all the Sources of type [CollectionSource]
-func (col CollectionDescription) CollectionSources() []*CollectionSource {
-	return sourcesOfType[*CollectionSource](col)
-}
-
-func sourcesOfType[ResultType any](col CollectionDescription) []ResultType {
-	result := []ResultType{}
-	for _, source := range col.Sources {
-		if typedSource, isOfType := source.(ResultType); isOfType {
-			result = append(result, typedSource)
-		}
-	}
-	return result
+	// Policy contains the policy information on this collection.
+	//
+	// It is possible for a collection to not have a policy; a collection
+	// without a policy has no access control.
+	//
+	// Note: The policy information must be validated using acp right after
+	// parsing is done, to avoid storing an invalid policyID or a policy resource
+	// that may not even exist on acp.
+	Policy immutable.Option[PolicyDescription]
}

// QuerySource represents a collection data source from a query.
@@ -169,213 +119,56 @@ type CollectionSource struct {
	Transform immutable.Option[model.Lens]
}

-// SchemaDescription describes a Schema and its associated metadata.
-type SchemaDescription struct {
-	// Root is the version agnostic identifier for this schema.
-	//
-	// It remains constant throughout the lifetime of this schema.
-	Root string
-
-	// VersionID is the version-specific identifier for this schema.
-	//
-	// It is generated on mutation of this schema and can be used to uniquely
-	// identify a schema at a specific version.
-	VersionID string
-
-	// Name is the name of this Schema.
-	//
-	// It is currently used to define the Collection Name, and as such these two properties
-	// will currently share the same name.
-	//
-	// It is immutable.
-	Name string
-
-	// Fields contains the fields within this Schema.
-	//
-	// Currently new fields may be added after initial declaration, but they cannot be removed.
-	Fields []SchemaFieldDescription
+// IDString returns the collection ID as a string.
+func (col CollectionDescription) IDString() string {
+	return fmt.Sprint(col.ID)
}

-// FieldKind describes the type of a field.
-type FieldKind uint8 - -func (f FieldKind) String() string { - switch f { - case FieldKind_DocID: - return "ID" - case FieldKind_NILLABLE_BOOL: - return "Boolean" - case FieldKind_NILLABLE_BOOL_ARRAY: - return "[Boolean]" - case FieldKind_BOOL_ARRAY: - return "[Boolean!]" - case FieldKind_NILLABLE_INT: - return "Int" - case FieldKind_NILLABLE_INT_ARRAY: - return "[Int]" - case FieldKind_INT_ARRAY: - return "[Int!]" - case FieldKind_NILLABLE_DATETIME: - return "DateTime" - case FieldKind_NILLABLE_FLOAT: - return "Float" - case FieldKind_NILLABLE_FLOAT_ARRAY: - return "[Float]" - case FieldKind_FLOAT_ARRAY: - return "[Float!]" - case FieldKind_NILLABLE_STRING: - return "String" - case FieldKind_NILLABLE_STRING_ARRAY: - return "[String]" - case FieldKind_STRING_ARRAY: - return "[String!]" - case FieldKind_NILLABLE_BLOB: - return "Blob" - case FieldKind_NILLABLE_JSON: - return "JSON" - default: - return fmt.Sprint(uint8(f)) +// GetFieldByName returns the field for the given field name. If such a field is found it +// will return it and true, if it is not found it will return false. +func (col CollectionDescription) GetFieldByName(fieldName string) (CollectionFieldDescription, bool) { + for _, field := range col.Fields { + if field.Name == fieldName { + return field, true + } } + return CollectionFieldDescription{}, false } -// IsObject returns true if this FieldKind is an object type. -func (f FieldKind) IsObject() bool { - return f == FieldKind_FOREIGN_OBJECT || - f == FieldKind_FOREIGN_OBJECT_ARRAY -} - -// IsObjectArray returns true if this FieldKind is an object array type. -func (f FieldKind) IsObjectArray() bool { - return f == FieldKind_FOREIGN_OBJECT_ARRAY -} - -// IsArray returns true if this FieldKind is an array type which includes inline arrays as well -// as relation arrays. -func (f FieldKind) IsArray() bool { - return f == FieldKind_BOOL_ARRAY || - f == FieldKind_INT_ARRAY || - f == FieldKind_FLOAT_ARRAY || - f == FieldKind_STRING_ARRAY || - f == FieldKind_FOREIGN_OBJECT_ARRAY || - f == FieldKind_NILLABLE_BOOL_ARRAY || - f == FieldKind_NILLABLE_INT_ARRAY || - f == FieldKind_NILLABLE_FLOAT_ARRAY || - f == FieldKind_NILLABLE_STRING_ARRAY -} - -// Note: These values are serialized and persisted in the database, avoid modifying existing values. -const ( - FieldKind_None FieldKind = 0 - FieldKind_DocID FieldKind = 1 - FieldKind_NILLABLE_BOOL FieldKind = 2 - FieldKind_BOOL_ARRAY FieldKind = 3 - FieldKind_NILLABLE_INT FieldKind = 4 - FieldKind_INT_ARRAY FieldKind = 5 - FieldKind_NILLABLE_FLOAT FieldKind = 6 - FieldKind_FLOAT_ARRAY FieldKind = 7 - _ FieldKind = 8 // safe to repurpose (was never used) - _ FieldKind = 9 // safe to repurpose (previously old field) - FieldKind_NILLABLE_DATETIME FieldKind = 10 - FieldKind_NILLABLE_STRING FieldKind = 11 - FieldKind_STRING_ARRAY FieldKind = 12 - FieldKind_NILLABLE_BLOB FieldKind = 13 - FieldKind_NILLABLE_JSON FieldKind = 14 - _ FieldKind = 15 // safe to repurpose (was never used) - - // Embedded object, but accessed via foreign keys - FieldKind_FOREIGN_OBJECT FieldKind = 16 - - // Array of embedded objects, accessed via foreign keys - FieldKind_FOREIGN_OBJECT_ARRAY FieldKind = 17 - - FieldKind_NILLABLE_BOOL_ARRAY FieldKind = 18 - FieldKind_NILLABLE_INT_ARRAY FieldKind = 19 - FieldKind_NILLABLE_FLOAT_ARRAY FieldKind = 20 - FieldKind_NILLABLE_STRING_ARRAY FieldKind = 21 -) - -// FieldKindStringToEnumMapping maps string representations of [FieldKind] values to -// their enum values. 
-// -// It is currently used to by [db.PatchSchema] to allow string representations of -// [FieldKind] to be provided instead of their raw int values. This usage may expand -// in the future. They currently roughly correspond to the GQL field types, but this -// equality is not guaranteed. -var FieldKindStringToEnumMapping = map[string]FieldKind{ - "ID": FieldKind_DocID, - "Boolean": FieldKind_NILLABLE_BOOL, - "[Boolean]": FieldKind_NILLABLE_BOOL_ARRAY, - "[Boolean!]": FieldKind_BOOL_ARRAY, - "Int": FieldKind_NILLABLE_INT, - "[Int]": FieldKind_NILLABLE_INT_ARRAY, - "[Int!]": FieldKind_INT_ARRAY, - "DateTime": FieldKind_NILLABLE_DATETIME, - "Float": FieldKind_NILLABLE_FLOAT, - "[Float]": FieldKind_NILLABLE_FLOAT_ARRAY, - "[Float!]": FieldKind_FLOAT_ARRAY, - "String": FieldKind_NILLABLE_STRING, - "[String]": FieldKind_NILLABLE_STRING_ARRAY, - "[String!]": FieldKind_STRING_ARRAY, - "Blob": FieldKind_NILLABLE_BLOB, - "JSON": FieldKind_NILLABLE_JSON, -} - -// RelationType describes the type of relation between two types. -type RelationType uint8 - -// FieldID is a unique identifier for a field in a schema. -type FieldID uint32 - -func (f FieldID) String() string { - return fmt.Sprint(uint32(f)) -} - -// SchemaFieldDescription describes a field on a Schema and its associated metadata. -type SchemaFieldDescription struct { - // Name contains the name of this field. - // - // It is currently immutable. - Name string - - // The data type that this field holds. - // - // Must contain a valid value. It is currently immutable. - Kind FieldKind - - // Schema contains the schema name of the type this field contains if this field is - // a relation field. Otherwise this will be empty. - Schema string - - // RelationName the name of the relationship that this field represents if this field is - // a relation field. Otherwise this will be empty. - RelationName string - - // The CRDT Type of this field. If no type has been provided it will default to [LWW_REGISTER]. - // - // It is currently immutable. - Typ CType - - // If true, this is the primary half of a relation, otherwise is false. - IsPrimaryRelation bool +// GetFieldByRelation returns the field that supports the relation of the given name. +func (col CollectionDescription) GetFieldByRelation( + relationName string, + otherCollectionName string, + otherFieldName string, +) (CollectionFieldDescription, bool) { + for _, field := range col.Fields { + if field.RelationName.Value() == relationName && + !(col.Name.Value() == otherCollectionName && otherFieldName == field.Name) && + field.Kind.Value() != FieldKind_DocID { + return field, true + } + } + return CollectionFieldDescription{}, false } -// CollectionFieldDescription describes the local components of a field on a collection. -type CollectionFieldDescription struct { - // Name contains the name of the [SchemaFieldDescription] that this field uses. - Name string - - // ID contains the local, internal ID of this field. - ID FieldID +// QuerySources returns all the Sources of type [QuerySource] +func (col CollectionDescription) QuerySources() []*QuerySource { + return sourcesOfType[*QuerySource](col) } -// IsRelation returns true if this field is a relation. -func (f SchemaFieldDescription) IsRelation() bool { - return f.RelationName != "" +// CollectionSources returns all the Sources of type [CollectionSource] +func (col CollectionDescription) CollectionSources() []*CollectionSource { + return sourcesOfType[*CollectionSource](col) } -// IsSet returns true if the target relation type is set. 
-func (m RelationType) IsSet(target RelationType) bool {
-	return m&target > 0
+func sourcesOfType[ResultType any](col CollectionDescription) []ResultType {
+	result := []ResultType{}
+	for _, source := range col.Sources {
+		if typedSource, isOfType := source.(ResultType); isOfType {
+			result = append(result, typedSource)
+		}
+	}
+	return result
}

// collectionDescription is a private type used to facilitate the unmarshalling
@@ -386,6 +179,7 @@ type collectionDescription struct {
	ID              uint32
	RootID          uint32
	SchemaVersionID string
+	Policy          immutable.Option[PolicyDescription]

	Indexes []IndexDescription
	Fields  []CollectionFieldDescription
@@ -407,6 +201,7 @@ func (c *CollectionDescription) UnmarshalJSON(bytes []byte) error {
	c.Indexes = descMap.Indexes
	c.Fields = descMap.Fields
	c.Sources = make([]any, len(descMap.Sources))
+	c.Policy = descMap.Policy

	for i, source := range descMap.Sources {
		sourceJson, err := json.Marshal(source)
diff --git a/client/collection_field_description.go b/client/collection_field_description.go
new file mode 100644
index 0000000000..98b012d641
--- /dev/null
+++ b/client/collection_field_description.go
@@ -0,0 +1,78 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package client
+
+import (
+	"encoding/json"
+	"fmt"
+
+	"github.com/sourcenetwork/immutable"
+)
+
+// FieldID is a unique identifier for a field in a schema.
+type FieldID uint32
+
+// CollectionFieldDescription describes the local components of a field on a collection.
+type CollectionFieldDescription struct {
+	// Name contains the name of the [SchemaFieldDescription] that this field uses.
+	Name string
+
+	// ID contains the local, internal ID of this field.
+	ID FieldID
+
+	// Kind contains the local field kind if this is a local-only field (e.g. the secondary
+	// side of a relation).
+	//
+	// If the field is globally defined (on the Schema), this will be [None].
+	Kind immutable.Option[FieldKind]
+
+	// RelationName contains the name of this relation, if this field is part of a relationship.
+	//
+	// Otherwise will be [None].
+	RelationName immutable.Option[string]
+}
+
+func (f FieldID) String() string {
+	return fmt.Sprint(uint32(f))
+}
+
+// collectionFieldDescription is a private type used to facilitate the unmarshalling
+// of json to a [CollectionFieldDescription].
+type collectionFieldDescription struct {
+	Name         string
+	ID           FieldID
+	RelationName immutable.Option[string]
+
+	// Properties below this line are unmarshalled using custom logic in [UnmarshalJSON]
+	Kind json.RawMessage
+}
+
+func (f *CollectionFieldDescription) UnmarshalJSON(bytes []byte) error {
+	var descMap collectionFieldDescription
+	err := json.Unmarshal(bytes, &descMap)
+	if err != nil {
+		return err
+	}
+
+	f.Name = descMap.Name
+	f.ID = descMap.ID
+	f.RelationName = descMap.RelationName
+	kind, err := parseFieldKind(descMap.Kind)
+	if err != nil {
+		return err
+	}
+
+	if kind != FieldKind_None {
+		f.Kind = immutable.Some(kind)
+	}
+
+	return nil
+}
diff --git a/client/ctype.go b/client/ctype.go
index c5f792df86..f9d961ec3e 100644
--- a/client/ctype.go
+++ b/client/ctype.go
@@ -23,12 +23,13 @@ const (
	OBJECT
	COMPOSITE
	PN_COUNTER
+	P_COUNTER
)

// IsSupportedFieldCType returns true if the type is supported as a document field type.
func (t CType) IsSupportedFieldCType() bool {
	switch t {
-	case NONE_CRDT, LWW_REGISTER, PN_COUNTER:
+	case NONE_CRDT, LWW_REGISTER, PN_COUNTER, P_COUNTER:
		return true
	default:
		return false
@@ -38,7 +39,7 @@ func (t CType) IsSupportedFieldCType() bool {
// IsCompatibleWith returns true if the CRDT is compatible with the field kind
func (t CType) IsCompatibleWith(kind FieldKind) bool {
	switch t {
-	case PN_COUNTER:
+	case PN_COUNTER, P_COUNTER:
		if kind == FieldKind_NILLABLE_INT || kind == FieldKind_NILLABLE_FLOAT {
			return true
		}
@@ -61,6 +62,8 @@ func (t CType) String() string {
		return "composite"
	case PN_COUNTER:
		return "pncounter"
+	case P_COUNTER:
+		return "pcounter"
	default:
		return "unknown"
	}
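A quick sketch of the compatibility rules the ctype.go hunk above encodes (a fragment, assuming the `client` and `fmt` imports):

```go
// Counter CRDTs may only back the nillable Int and Float field kinds.
okInt := client.P_COUNTER.IsCompatibleWith(client.FieldKind_NILLABLE_INT)    // true
okStr := client.P_COUNTER.IsCompatibleWith(client.FieldKind_NILLABLE_STRING) // false
fmt.Println(okInt, okStr, client.P_COUNTER.String())                         // true false pcounter
```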
diff --git a/client/data_definition.md b/client/data_definition.md
new file mode 100644
index 0000000000..c0a197158e
--- /dev/null
+++ b/client/data_definition.md
@@ -0,0 +1,65 @@
+# Data Definition in a DefraDB instance
+
+Data held in a DefraDB instance is organized into [collections](#collections) of documents. [Collections](#collections) are [local](#local-definitions) groupings of documents that share the same [globally](#global-definitions) defined shape declared by a [schema](#schemas).
+
+## Local definitions
+
+Local definitions are specific to the node you are directly working with; they are not shared with, or assumed to be the same on, other nodes in the network.
+
+Splitting local elements out from the global ones allows some local customization of the way data is organized within any given node. It also minimizes the amount of 'stuff' that must be kept consistent across the decentralized network in order to have a well-behaved database.
+
+Local data definitions are always defined on the [collection](#collections).
+
+Examples include indexes, field IDs, and [lens transforms](https://docs.source.network/defradb/guides/schema-migration).
+
+## Global definitions
+
+Global definitions are consistent across all nodes in the decentralized network. This is enforced by the use of things like CIDs for schema versions. If a global definition were to differ across nodes, the different variations would be treated as completely different definitions.
+
+Global data definitions are always defined on the [schema](#schemas).
+
+Examples include field names, field kinds, and [CRDTs](https://docs.source.network/defradb/guides/merkle-crdt).
+
+## Collections
+
+Collections represent [local](#local-definitions), independently queryable datasets sharing the same shape.
+
+Collections are defined by the `CollectionDescription` struct. This can be mutated via the `PatchCollection` function.
+A collection will always have a [global](#global-definitions) shape defined by a single [schema](#schemas) version.
+
+### Versions
+
+`CollectionDescription` instances may be active or inactive. Inactive `CollectionDescription`s will not have a name, and cannot be queried.
+
+When a new [schema](#schemas) version is created and has a collection defined for it, a new `CollectionDescription` instance will be created and linked to the new schema version. The new `CollectionDescription` instance will share the same root ID as the previous one, and may be active or inactive depending on the arguments specified by the user defining the new schema.
+
+[Lens migrations](https://docs.source.network/defradb/guides/schema-migration) between collection versions may be defined. These are, like everything on the collection, [local](#local-definitions). They allow transformation of data between versions, allowing documents synced across the node network at one schema version to be presented to users at **query time** at another version.
+
+### Collection fields
+
+The set of fields on a `CollectionDescription` defines [local](#local-definitions) aspects of [globally](#global-definitions) defined fields on the collection's [schema](#schemas). The set may also include local-only fields that are not defined on the schema and will not be synced to other nodes - currently these are limited to the secondary side of a relationship defined between two collections.
+
+### Views
+
+Collections are not limited to representing writeable data. Collections can also represent views of written data.
+
+Views are collections with a `QuerySource` source in the `Sources` set. On query they will fetch data from the query defined on `QuerySource`, and then (optionally) apply a [Lens](https://github.com/lens-vm/lens) transform before yielding the results to the user. The query may point to another view, allowing views of views of views.
+
+Views may be defined using the `AddView` function.
+
+### Embedded types
+
+Some fields on a collection may represent a complex object. Typically these will be a relationship to another collection; however, they may instead represent an embedded type.
+
+Embedded types cannot exist or be queried outside of the context of their host collection, and thus are defined only as a [global](#global-definitions) shape represented by a [schema](#schemas).
+
+Related objects defined in a [view](#views) are embedded objects.
+
+## Schemas
+
+Schemas represent [global](#global-definitions) data shapes. They cannot host document data themselves or be queried; that is done via [collections](#collections).
+
+Schemas are defined by the `SchemaDescription` struct. They are immutable; however, new versions can be created using the `PatchSchema` function.
+
+Multiple [collections](#collections) may reference the same schema.
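+
+As a concrete sketch of the mutation entry points named above (the patch paths, collection ID, and field values here are illustrative, not a fixed contract; `ctx` and a `db client.DB` are assumed to be in scope):
+
+```go
+// Assumes imports of github.com/sourcenetwork/immutable and
+// github.com/lens-vm/lens/host-go/config/model.
+
+// Create a new schema version by adding a field; field kinds may be given
+// by their string names. The final argument controls whether the new
+// version is activated immediately.
+err := db.PatchSchema(
+	ctx,
+	`[{"op": "add", "path": "/Users/Fields/-", "value": {"Name": "email", "Kind": "String"}}]`,
+	immutable.None[model.Lens](), // no lens migration between the versions
+	true,
+)
+
+// Mutate the local collection description; currently only the name can be modified.
+err = db.PatchCollection(ctx, `[{"op": "replace", "path": "/1/Name", "value": "Authors"}]`)
+```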
diff --git a/client/db.go b/client/db.go
index 7b0cc8060f..c5cb95eb4b 100644
--- a/client/db.go
+++ b/client/db.go
@@ -42,9 +42,6 @@ type DB interface {
	// can safely operate on it concurrently.
	NewConcurrentTxn(context.Context, bool) (datastore.Txn, error)

-	// WithTxn returns a new [client.Store] that respects the given transaction.
-	WithTxn(datastore.Txn) Store
-
	// Root returns the underlying root store, within which all data managed by DefraDB is held.
	Root() datastore.RootStore

@@ -85,6 +82,18 @@ type DB interface {
	//
	// It is likely unwise to call this on a large database instance.
	PrintDump(ctx context.Context) error
+
+	// AddPolicy adds a policy to acp, if acp is available.
+	//
+	// If the policy is successfully added to acp then a policyID is returned;
+	// otherwise, if acp was not available, the following error is returned:
+	// [client.ErrPolicyAddFailureNoACP]
+	//
+	// Detects the format of the policy automatically by assuming YAML format if JSON
+	// validation fails.
+	//
+	// Note: A policy cannot be added without the creatorID (identity).
+	AddPolicy(ctx context.Context, policy string) (AddPolicyResult, error)
}

// Store contains the core DefraDB read-write operations.
@@ -120,6 +129,17 @@ type Store interface {
	// A lens configuration may also be provided, it will be added to all collections using the schema.
	PatchSchema(context.Context, string, immutable.Option[model.Lens], bool) error

+	// PatchCollection takes the given JSON patch string and applies it to the set of CollectionDescriptions
+	// present in the database.
+	//
+	// It will also update the GQL types used by the query system. It will error, and not apply any of the
+	// requested updates (even individually valid ones), should the net result of the patch be an invalid
+	// state. The individual operations defined in the patch do not need to result in a valid state, only
+	// the net result of the full patch.
+	//
+	// Currently only the collection name can be modified.
+	PatchCollection(context.Context, string) error
+
	// SetActiveSchemaVersion activates all collection versions with the given schema version, and deactivates all
	// those without it (if they share the same schema root).
	//
@@ -216,7 +236,7 @@ type Store interface {
	GetAllIndexes(context.Context) (map[CollectionName][]IndexDescription, error)

	// ExecRequest executes the given GQL request against the [Store].
-	ExecRequest(context.Context, string) *RequestResult
+	ExecRequest(ctx context.Context, request string) *RequestResult
}

// GQLResult represents the immediate results of a GQL request.
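A minimal call-site sketch for the new `AddPolicy` method (the policy body is illustrative and abbreviated; `AddPolicyResult` is assumed to expose the returned policyID, and `ctx` and `db client.DB` are assumed in scope):

```go
// Format detection is automatic: YAML is assumed if JSON validation fails.
policy := `
description: an illustrative policy
resources:
  users:
    permissions:
      read:
        expr: owner
`
result, err := db.AddPolicy(ctx, policy)
if err != nil {
	// client.ErrPolicyAddFailureNoACP is returned when acp is not available.
	return err
}
fmt.Println(result.PolicyID) // assumed field name on AddPolicyResult
```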
diff --git a/client/definitions.go b/client/definitions.go
index e521a69fcf..c04159f679 100644
--- a/client/definitions.go
+++ b/client/definitions.go
@@ -25,16 +25,28 @@ type CollectionDefinition struct {
// GetFieldByName returns the field for the given field name. If such a field is found it
// will return it and true, if it is not found it will return false.
func (def CollectionDefinition) GetFieldByName(fieldName string) (FieldDefinition, bool) {
-	collectionField, ok := def.Description.GetFieldByName(fieldName)
-	if ok {
-		schemaField, ok := def.Schema.GetFieldByName(fieldName)
-		if ok {
-			return NewFieldDefinition(
-				collectionField,
-				schemaField,
-			), true
-		}
+	collectionField, existsOnCollection := def.Description.GetFieldByName(fieldName)
+	schemaField, existsOnSchema := def.Schema.GetFieldByName(fieldName)
+
+	if existsOnCollection && existsOnSchema {
+		return NewFieldDefinition(
+			collectionField,
+			schemaField,
+		), true
+	} else if existsOnCollection && !existsOnSchema {
+		// If the field exists only on the collection, it is a local-only field, for example the
+		// secondary side of a relation.
+		return NewLocalFieldDefinition(
+			collectionField,
+		), true
+	} else if !existsOnCollection && existsOnSchema {
+		// If the field only exists on the schema, it is likely a schema-only object
+		// definition, for example for an embedded object.
+		return NewSchemaOnlyFieldDefinition(
+			schemaField,
+		), true
	}
+
	return FieldDefinition{}, false
}

@@ -42,6 +54,8 @@ func (def CollectionDefinition) GetFieldByName(fieldName string) (FieldDefinitio
// as a single set.
func (def CollectionDefinition) GetFields() []FieldDefinition {
	fields := []FieldDefinition{}
+	localFieldNames := map[string]struct{}{}
+
	for _, localField := range def.Description.Fields {
		globalField, ok := def.Schema.GetFieldByName(localField.Name)
		if ok {
@@ -49,11 +63,41 @@ func (def CollectionDefinition) GetFields() []FieldDefinition {
				fields,
				NewFieldDefinition(localField, globalField),
			)
+		} else {
+			// This must be a local-only field, for example the secondary side of a relation.
+			fields = append(
+				fields,
+				NewLocalFieldDefinition(localField),
+			)
+		}
+		localFieldNames[localField.Name] = struct{}{}
+	}
+
+	for _, schemaField := range def.Schema.Fields {
+		if _, ok := localFieldNames[schemaField.Name]; ok {
+			continue
		}
+		// This must be a global-only field, for example on an embedded object.
+		fields = append(
+			fields,
+			NewSchemaOnlyFieldDefinition(schemaField),
+		)
	}
+
	return fields
}

+// GetName gets the name of this definition.
+//
+// If the collection description has a name (e.g. it is an active collection) it will return that,
+// otherwise it will return the schema name.
+func (def CollectionDefinition) GetName() string {
+	if def.Description.Name.HasValue() {
+		return def.Description.Name.Value()
+	}
+	return def.Schema.Name
+}
+
// FieldDefinition describes the combined local and global set of properties that constitutes
// a field on a collection.
//
@@ -78,10 +122,6 @@ type FieldDefinition struct {
	// Must contain a valid value. It is currently immutable.
	Kind FieldKind

-	// Schema contains the schema name of the type this field contains if this field is
-	// a relation field. Otherwise this will be empty.
-	Schema string
-
	// RelationName the name of the relationship that this field represents if this field is
	// a relation field. Otherwise this will be empty.
	RelationName string
@@ -98,14 +138,39 @@ type FieldDefinition struct {
// NewFieldDefinition returns a new [FieldDefinition], combining the given local and global elements
// into a single object.
func NewFieldDefinition(local CollectionFieldDescription, global SchemaFieldDescription) FieldDefinition {
+	var kind FieldKind
+	if local.Kind.HasValue() {
+		kind = local.Kind.Value()
+	} else {
+		kind = global.Kind
+	}
+
	return FieldDefinition{
		Name:              global.Name,
		ID:                local.ID,
-		Kind:              global.Kind,
-		Schema:            global.Schema,
-		RelationName:      global.RelationName,
+		Kind:              kind,
+		RelationName:      local.RelationName.Value(),
		Typ:               global.Typ,
-		IsPrimaryRelation: global.IsPrimaryRelation,
+		IsPrimaryRelation: kind.IsObject() && !kind.IsArray(),
	}
}

+// NewLocalFieldDefinition returns a new [FieldDefinition] from the given local [CollectionFieldDescription].
+func NewLocalFieldDefinition(local CollectionFieldDescription) FieldDefinition {
+	return FieldDefinition{
+		Name:         local.Name,
+		ID:           local.ID,
+		Kind:         local.Kind.Value(),
+		RelationName: local.RelationName.Value(),
+	}
+}
+
+// NewSchemaOnlyFieldDefinition returns a new [FieldDefinition] from the given global [SchemaFieldDescription].
+func NewSchemaOnlyFieldDefinition(global SchemaFieldDescription) FieldDefinition {
+	return FieldDefinition{
+		Name: global.Name,
+		Kind: global.Kind,
+		Typ:  global.Typ,
+	}
+}
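The document.go changes below thread this full `CollectionDefinition` through document construction in place of the bare `SchemaDescription`. A minimal call-site sketch from outside the package, assuming a `def` assembled like the one in document_test.go:

```go
// The definition supplies both local and global field information, so
// validation can resolve local-only fields as well as schema fields.
doc, err := client.NewDocFromJSON(
	[]byte(`{"Name": "John", "Age": 26}`),
	def, // a client.CollectionDefinition
)
if err != nil {
	return err
}
_ = doc
```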
diff --git a/client/document.go b/client/document.go
index 6c837260ba..4534e9fa33 100644
--- a/client/document.go
+++ b/client/document.go
@@ -66,28 +66,28 @@ type Document struct {
	// marks if document has unsaved changes
	isDirty bool

-	schemaDescription SchemaDescription
+	collectionDefinition CollectionDefinition
}

-func newEmptyDoc(sd SchemaDescription) *Document {
+func newEmptyDoc(collectionDefinition CollectionDefinition) *Document {
	return &Document{
-		fields:            make(map[string]Field),
-		values:            make(map[Field]*FieldValue),
-		schemaDescription: sd,
+		fields:               make(map[string]Field),
+		values:               make(map[Field]*FieldValue),
+		collectionDefinition: collectionDefinition,
	}
}

// NewDocWithID creates a new Document with a specified key.
-func NewDocWithID(docID DocID, sd SchemaDescription) *Document {
-	doc := newEmptyDoc(sd)
+func NewDocWithID(docID DocID, collectionDefinition CollectionDefinition) *Document {
+	doc := newEmptyDoc(collectionDefinition)
	doc.id = docID
	return doc
}

// NewDocFromMap creates a new Document from a data map.
-func NewDocFromMap(data map[string]any, sd SchemaDescription) (*Document, error) {
+func NewDocFromMap(data map[string]any, collectionDefinition CollectionDefinition) (*Document, error) {
	var err error
-	doc := newEmptyDoc(sd)
+	doc := newEmptyDoc(collectionDefinition)

	// check if document contains special _docID field
	k, hasDocID := data[request.DocIDFieldName]
@@ -126,8 +126,8 @@ func IsJSONArray(obj []byte) bool {
}

// NewFromJSON creates a new instance of a Document from a raw JSON object byte array.
-func NewDocFromJSON(obj []byte, sd SchemaDescription) (*Document, error) {
-	doc := newEmptyDoc(sd)
+func NewDocFromJSON(obj []byte, collectionDefinition CollectionDefinition) (*Document, error) {
+	doc := newEmptyDoc(collectionDefinition)
	err := doc.SetWithJSON(obj)
	if err != nil {
		return nil, err
@@ -141,7 +141,7 @@ func NewDocFromJSON(obj []byte, sd SchemaDescription) (*Document, error) {

// ManyFromJSON creates a new slice of Documents from a raw JSON array byte array.
// It will return an error if the given byte array is not a valid JSON array.
-func NewDocsFromJSON(obj []byte, sd SchemaDescription) ([]*Document, error) {
+func NewDocsFromJSON(obj []byte, collectionDefinition CollectionDefinition) ([]*Document, error) {
	v, err := fastjson.ParseBytes(obj)
	if err != nil {
		return nil, err
@@ -157,7 +157,7 @@ func NewDocsFromJSON(obj []byte, sd SchemaDescription) ([]*Document, error) {
		if err != nil {
			return nil, err
		}
-		doc := newEmptyDoc(sd)
+		doc := newEmptyDoc(collectionDefinition)
		err = doc.setWithFastJSONObject(o)
		if err != nil {
			return nil, err
@@ -172,80 +172,130 @@ func NewDocsFromJSON(obj []byte, sd SchemaDescription) ([]*Document, error) {
	return docs, nil
}

-// IsNillableKind returns true if the given FieldKind is nillable.
-func IsNillableKind(kind FieldKind) bool {
-	switch kind {
-	case FieldKind_NILLABLE_STRING, FieldKind_NILLABLE_BLOB, FieldKind_NILLABLE_JSON,
-		FieldKind_NILLABLE_BOOL, FieldKind_NILLABLE_FLOAT, FieldKind_NILLABLE_DATETIME,
-		FieldKind_NILLABLE_INT:
-		return true
-	default:
-		return false
-	}
-}
-
// validateFieldSchema takes a given value as an interface,
// and ensures it matches the supplied field description.
// It will do any minor parsing, like dates, and return
// the typed value again as an interface.
-func validateFieldSchema(val any, field SchemaFieldDescription) (any, error) { - if IsNillableKind(field.Kind) { +func validateFieldSchema(val any, field FieldDefinition) (NormalValue, error) { + if field.Kind.IsNillable() { if val == nil { - return nil, nil + return NewNormalNil(field.Kind) } if v, ok := val.(*fastjson.Value); ok && v.Type() == fastjson.TypeNull { - return nil, nil + return NewNormalNil(field.Kind) } } + if field.Kind.IsObjectArray() { + return nil, NewErrFieldNotExist(field.Name) + } + + if field.Kind.IsObject() { + v, err := getString(val) + if err != nil { + return nil, err + } + return NewNormalString(v), nil + } + switch field.Kind { case FieldKind_DocID, FieldKind_NILLABLE_STRING, FieldKind_NILLABLE_BLOB: - return getString(val) + v, err := getString(val) + if err != nil { + return nil, err + } + return NewNormalString(v), nil case FieldKind_STRING_ARRAY: - return getArray(val, getString) + v, err := getArray(val, getString) + if err != nil { + return nil, err + } + return NewNormalStringArray(v), nil case FieldKind_NILLABLE_STRING_ARRAY: - return getNillableArray(val, getString) + v, err := getNillableArray(val, getString) + if err != nil { + return nil, err + } + return NewNormalNillableStringArray(v), nil case FieldKind_NILLABLE_BOOL: - return getBool(val) + v, err := getBool(val) + if err != nil { + return nil, err + } + return NewNormalBool(v), nil case FieldKind_BOOL_ARRAY: - return getArray(val, getBool) + v, err := getArray(val, getBool) + if err != nil { + return nil, err + } + return NewNormalBoolArray(v), nil case FieldKind_NILLABLE_BOOL_ARRAY: - return getNillableArray(val, getBool) + v, err := getNillableArray(val, getBool) + if err != nil { + return nil, err + } + return NewNormalNillableBoolArray(v), nil case FieldKind_NILLABLE_FLOAT: - return getFloat64(val) + v, err := getFloat64(val) + if err != nil { + return nil, err + } + return NewNormalFloat(v), nil case FieldKind_FLOAT_ARRAY: - return getArray(val, getFloat64) + v, err := getArray(val, getFloat64) + if err != nil { + return nil, err + } + return NewNormalFloatArray(v), nil case FieldKind_NILLABLE_FLOAT_ARRAY: - return getNillableArray(val, getFloat64) + v, err := getNillableArray(val, getFloat64) + if err != nil { + return nil, err + } + return NewNormalNillableFloatArray(v), nil case FieldKind_NILLABLE_DATETIME: - return getDateTime(val) + v, err := getDateTime(val) + if err != nil { + return nil, err + } + return NewNormalTime(v), nil case FieldKind_NILLABLE_INT: - return getInt64(val) + v, err := getInt64(val) + if err != nil { + return nil, err + } + return NewNormalInt(v), nil case FieldKind_INT_ARRAY: - return getArray(val, getInt64) + v, err := getArray(val, getInt64) + if err != nil { + return nil, err + } + return NewNormalIntArray(v), nil case FieldKind_NILLABLE_INT_ARRAY: - return getNillableArray(val, getInt64) - - case FieldKind_FOREIGN_OBJECT: - return getString(val) - - case FieldKind_FOREIGN_OBJECT_ARRAY: - return nil, NewErrFieldOrAliasToFieldNotExist(field.Name) + v, err := getNillableArray(val, getInt64) + if err != nil { + return nil, err + } + return NewNormalNillableIntArray(v), nil case FieldKind_NILLABLE_JSON: - return getJSON(val) + v, err := getJSON(val) + if err != nil { + return nil, err + } + return NewNormalString(v), nil } return nil, NewErrUnhandledType("FieldKind", field.Kind) @@ -538,15 +588,15 @@ func (doc *Document) setWithFastJSONObject(obj *fastjson.Object) error { // Set the value of a field. 
func (doc *Document) Set(field string, value any) error { - fd, exists := doc.schemaDescription.GetFieldByName(field) + fd, exists := doc.collectionDefinition.GetFieldByName(field) if !exists { return NewErrFieldNotExist(field) } - if fd.IsRelation() && !fd.Kind.IsObjectArray() { + if fd.Kind.IsObject() && !fd.Kind.IsObjectArray() { if !strings.HasSuffix(field, request.RelatedObjectID) { field = field + request.RelatedObjectID } - fd, exists = doc.schemaDescription.GetFieldByName(field) + fd, exists = doc.collectionDefinition.GetFieldByName(field) if !exists { return NewErrFieldNotExist(field) } @@ -573,16 +623,13 @@ func (doc *Document) set(t CType, field string, value *FieldValue) error { return nil } -func (doc *Document) setCBOR(t CType, field string, val any) error { +func (doc *Document) setCBOR(t CType, field string, val NormalValue) error { value := NewFieldValue(t, val) return doc.set(t, field, value) } func (doc *Document) setAndParseObjectType(value map[string]any) error { for k, v := range value { - if v == nil { - continue - } err := doc.Set(k, v) if err != nil { return err diff --git a/client/document_test.go b/client/document_test.go index 593876705f..a70e868e0e 100644 --- a/client/document_test.go +++ b/client/document_test.go @@ -16,6 +16,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/sourcenetwork/immutable" + ccid "github.com/sourcenetwork/defradb/core/cid" ) @@ -27,8 +29,22 @@ var ( pref = ccid.NewDefaultSHA256PrefixV1() - schemaDescriptions = []SchemaDescription{ - { + def = CollectionDefinition{ + Description: CollectionDescription{ + Name: immutable.Some("User"), + Fields: []CollectionFieldDescription{ + { + Name: "Name", + }, + { + Name: "Age", + }, + { + Name: "Custom", + }, + }, + }, + Schema: SchemaDescription{ Name: "User", Fields: []SchemaFieldDescription{ { @@ -52,7 +68,7 @@ var ( ) func TestNewFromJSON(t *testing.T) { - doc, err := NewDocFromJSON(testJSONObj, schemaDescriptions[0]) + doc, err := NewDocFromJSON(testJSONObj, def) if err != nil { t.Error("Error creating new doc from JSON:", err) return @@ -90,7 +106,7 @@ func TestNewFromJSON(t *testing.T) { } func TestSetWithJSON(t *testing.T) { - doc, err := NewDocFromJSON(testJSONObj, schemaDescriptions[0]) + doc, err := NewDocFromJSON(testJSONObj, def) if err != nil { t.Error("Error creating new doc from JSON:", err) return @@ -137,7 +153,7 @@ func TestSetWithJSON(t *testing.T) { } func TestNewDocsFromJSON_WithObjectInsteadOfArray_Error(t *testing.T) { - _, err := NewDocsFromJSON(testJSONObj, schemaDescriptions[0]) + _, err := NewDocsFromJSON(testJSONObj, def) require.ErrorContains(t, err, "value doesn't contain array; it contains object") } @@ -147,7 +163,7 @@ func TestNewFromJSON_WithValidJSONFieldValue_NoError(t *testing.T) { "Age": 26, "Custom": "{\"tree\":\"maple\", \"age\": 260}" }`) - doc, err := NewDocFromJSON(objWithJSONField, schemaDescriptions[0]) + doc, err := NewDocFromJSON(objWithJSONField, def) if err != nil { t.Error("Error creating new doc from JSON:", err) return @@ -177,7 +193,7 @@ func TestNewFromJSON_WithInvalidJSONFieldValue_Error(t *testing.T) { "Age": 26, "Custom": "{\"tree\":\"maple, \"age\": 260}" }`) - _, err := NewDocFromJSON(objWithJSONField, schemaDescriptions[0]) + _, err := NewDocFromJSON(objWithJSONField, def) require.ErrorContains(t, err, "invalid JSON payload. 
Payload: {\"tree\":\"maple, \"age\": 260}") } @@ -187,6 +203,6 @@ func TestNewFromJSON_WithInvalidJSONFieldValueSimpleString_Error(t *testing.T) { "Age": 26, "Custom": "blah" }`) - _, err := NewDocFromJSON(objWithJSONField, schemaDescriptions[0]) + _, err := NewDocFromJSON(objWithJSONField, def) require.ErrorContains(t, err, "invalid JSON payload. Payload: blah") } diff --git a/client/errors.go b/client/errors.go index c86ac274c7..460392a030 100644 --- a/client/errors.go +++ b/client/errors.go @@ -22,15 +22,16 @@ const ( errParsingFailed string = "failed to parse argument" errUninitializeProperty string = "invalid state, required property is uninitialized" errMaxTxnRetries string = "reached maximum transaction reties" - errRelationOneSided string = "relation must be defined on both schemas" errCollectionNotFound string = "collection not found" - errFieldOrAliasToFieldNotExist string = "The given field or alias to field does not exist" errUnknownCRDT string = "unknown crdt" errCRDTKindMismatch string = "CRDT type %s can't be assigned to field kind %s" errInvalidCRDTType string = "CRDT type not supported" errFailedToUnmarshalCollection string = "failed to unmarshal collection json" errOperationNotPermittedOnNamelessCols string = "operation not permitted on nameless collection" errInvalidJSONPayload string = "invalid JSON payload" + errCanNotNormalizeValue string = "can not normalize value" + errCanNotTurnNormalValueIntoArray string = "can not turn normal value into array" + errCanNotMakeNormalNilFromFieldKind string = "can not make normal nil from field kind" ) // Errors returnable from this package. @@ -44,13 +45,17 @@ var ( ErrOperationNotPermittedOnNamelessCols = errors.New(errOperationNotPermittedOnNamelessCols) ErrFieldNotObject = errors.New("trying to access field on a non object type") ErrValueTypeMismatch = errors.New("value does not match indicated type") - ErrDocumentNotFound = errors.New("no document for the given ID exists") + ErrDocumentNotFoundOrNotAuthorized = errors.New("document not found or not authorized to access") + ErrPolicyAddFailureNoACP = errors.New("failure adding policy because ACP was not available") ErrInvalidUpdateTarget = errors.New("the target document to update is of invalid type") ErrInvalidUpdater = errors.New("the updater of a document is of invalid type") ErrInvalidDeleteTarget = errors.New("the target document to delete is of invalid type") ErrMalformedDocID = errors.New("malformed document ID, missing either version or cid") ErrInvalidDocIDVersion = errors.New("invalid document ID version") ErrInvalidJSONPayload = errors.New(errInvalidJSONPayload) + ErrCanNotNormalizeValue = errors.New(errCanNotNormalizeValue) + ErrCanNotTurnNormalValueIntoArray = errors.New(errCanNotTurnNormalValueIntoArray) + ErrCanNotMakeNormalNilFromFieldKind = errors.New(errCanNotMakeNormalNilFromFieldKind) ) // NewErrFieldNotExist returns an error indicating that the given field does not exist. @@ -75,6 +80,23 @@ func NewErrUnexpectedType[TExpected any](property string, actual any) error { ) } +// NewCanNotNormalizeValue returns an error indicating that the given value can not be normalized. +func NewCanNotNormalizeValue(val any) error { + return errors.New(errCanNotNormalizeValue, errors.NewKV("Value", val)) +} + +// NewCanNotTurnNormalValueIntoArray returns an error indicating that the given value can not be +// turned into an array. 
+func NewCanNotTurnNormalValueIntoArray(val any) error { + return errors.New(errCanNotTurnNormalValueIntoArray, errors.NewKV("Value", val)) +} + +// NewCanNotMakeNormalNilFromFieldKind returns an error indicating that a normal nil value can not be +// created from the given field kind. +func NewCanNotMakeNormalNilFromFieldKind(kind FieldKind) error { + return errors.New(errCanNotMakeNormalNilFromFieldKind, errors.NewKV("Kind", kind)) +} + // NewErrUnhandledType returns an error indicating that the given value is of // a type that is not handled. func NewErrUnhandledType(property string, actual any) error { @@ -106,14 +128,6 @@ func NewErrMaxTxnRetries(inner error) error { return errors.Wrap(errMaxTxnRetries, inner) } -func NewErrRelationOneSided(fieldName string, typeName string) error { - return errors.New( - errRelationOneSided, - errors.NewKV("Field", fieldName), - errors.NewKV("Type", typeName), - ) -} - func NewErrCollectionNotFoundForSchemaVersion(schemaVersionID string) error { return errors.New( errCollectionNotFound, @@ -135,11 +149,6 @@ func NewErrUnknownCRDT(cType CType) error { ) } -// NewErrFieldOrAliasToFieldNotExist returns an error indicating that the given field or an alias field does not exist. -func NewErrFieldOrAliasToFieldNotExist(name string) error { - return errors.New(errFieldOrAliasToFieldNotExist, errors.NewKV("Name", name)) -} - func NewErrInvalidCRDTType(name, crdtType string) error { return errors.New( errInvalidCRDTType, diff --git a/client/lens.go b/client/lens.go index 1a6b423991..3f5befc604 100644 --- a/client/lens.go +++ b/client/lens.go @@ -15,8 +15,6 @@ import ( "github.com/lens-vm/lens/host-go/config/model" "github.com/sourcenetwork/immutable/enumerable" - - "github.com/sourcenetwork/defradb/datastore" ) // LensConfig represents the configuration of a Lens migration in Defra. @@ -43,12 +41,6 @@ type LensConfig struct { // LensRegistry exposes several useful thread-safe migration related functions which may // be used to manage migrations. type LensRegistry interface { - // WithTxn returns a new LensRegistry scoped to the given transaction. - // - // WARNING: Currently this does not provide snapshot isolation, if other transactions are committed - // after this has been created, the results of those commits will be visible within this scope. - WithTxn(datastore.Txn) LensRegistry - // SetMigration caches the migration for the given collection ID. It does not persist the migration in long // term storage, for that one should call [Store.SetMigration(ctx, cfg)]. 
// diff --git a/client/mocks/collection.go b/client/mocks/collection.go index 6e6c7afae3..7c227edd2b 100644 --- a/client/mocks/collection.go +++ b/client/mocks/collection.go @@ -7,8 +7,6 @@ import ( client "github.com/sourcenetwork/defradb/client" - datastore "github.com/sourcenetwork/defradb/datastore" - immutable "github.com/sourcenetwork/immutable" mock "github.com/stretchr/testify/mock" @@ -27,13 +25,13 @@ func (_m *Collection) EXPECT() *Collection_Expecter { return &Collection_Expecter{mock: &_m.Mock} } -// Create provides a mock function with given fields: _a0, _a1 -func (_m *Collection) Create(_a0 context.Context, _a1 *client.Document) error { - ret := _m.Called(_a0, _a1) +// Create provides a mock function with given fields: ctx, doc +func (_m *Collection) Create(ctx context.Context, doc *client.Document) error { + ret := _m.Called(ctx, doc) var r0 error if rf, ok := ret.Get(0).(func(context.Context, *client.Document) error); ok { - r0 = rf(_a0, _a1) + r0 = rf(ctx, doc) } else { r0 = ret.Error(0) } @@ -47,13 +45,13 @@ type Collection_Create_Call struct { } // Create is a helper method to define mock.On call -// - _a0 context.Context -// - _a1 *client.Document -func (_e *Collection_Expecter) Create(_a0 interface{}, _a1 interface{}) *Collection_Create_Call { - return &Collection_Create_Call{Call: _e.mock.On("Create", _a0, _a1)} +// - ctx context.Context +// - doc *client.Document +func (_e *Collection_Expecter) Create(ctx interface{}, doc interface{}) *Collection_Create_Call { + return &Collection_Create_Call{Call: _e.mock.On("Create", ctx, doc)} } -func (_c *Collection_Create_Call) Run(run func(_a0 context.Context, _a1 *client.Document)) *Collection_Create_Call { +func (_c *Collection_Create_Call) Run(run func(ctx context.Context, doc *client.Document)) *Collection_Create_Call { _c.Call.Run(func(args mock.Arguments) { run(args[0].(context.Context), args[1].(*client.Document)) }) @@ -70,6 +68,49 @@ func (_c *Collection_Create_Call) RunAndReturn(run func(context.Context, *client return _c } +// CreateDocIndex provides a mock function with given fields: _a0, _a1 +func (_m *Collection) CreateDocIndex(_a0 context.Context, _a1 *client.Document) error { + ret := _m.Called(_a0, _a1) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *client.Document) error); ok { + r0 = rf(_a0, _a1) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Collection_CreateDocIndex_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateDocIndex' +type Collection_CreateDocIndex_Call struct { + *mock.Call +} + +// CreateDocIndex is a helper method to define mock.On call +// - _a0 context.Context +// - _a1 *client.Document +func (_e *Collection_Expecter) CreateDocIndex(_a0 interface{}, _a1 interface{}) *Collection_CreateDocIndex_Call { + return &Collection_CreateDocIndex_Call{Call: _e.mock.On("CreateDocIndex", _a0, _a1)} +} + +func (_c *Collection_CreateDocIndex_Call) Run(run func(_a0 context.Context, _a1 *client.Document)) *Collection_CreateDocIndex_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*client.Document)) + }) + return _c +} + +func (_c *Collection_CreateDocIndex_Call) Return(_a0 error) *Collection_CreateDocIndex_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Collection_CreateDocIndex_Call) RunAndReturn(run func(context.Context, *client.Document) error) *Collection_CreateDocIndex_Call { + _c.Call.Return(run) + return _c +} + // CreateIndex provides a mock function with given fields: _a0, 
_a1 func (_m *Collection) CreateIndex(_a0 context.Context, _a1 client.IndexDescription) (client.IndexDescription, error) { ret := _m.Called(_a0, _a1) @@ -123,13 +164,13 @@ func (_c *Collection_CreateIndex_Call) RunAndReturn(run func(context.Context, cl return _c } -// CreateMany provides a mock function with given fields: _a0, _a1 -func (_m *Collection) CreateMany(_a0 context.Context, _a1 []*client.Document) error { - ret := _m.Called(_a0, _a1) +// CreateMany provides a mock function with given fields: ctx, docs +func (_m *Collection) CreateMany(ctx context.Context, docs []*client.Document) error { + ret := _m.Called(ctx, docs) var r0 error if rf, ok := ret.Get(0).(func(context.Context, []*client.Document) error); ok { - r0 = rf(_a0, _a1) + r0 = rf(ctx, docs) } else { r0 = ret.Error(0) } @@ -143,13 +184,13 @@ type Collection_CreateMany_Call struct { } // CreateMany is a helper method to define mock.On call -// - _a0 context.Context -// - _a1 []*client.Document -func (_e *Collection_Expecter) CreateMany(_a0 interface{}, _a1 interface{}) *Collection_CreateMany_Call { - return &Collection_CreateMany_Call{Call: _e.mock.On("CreateMany", _a0, _a1)} +// - ctx context.Context +// - docs []*client.Document +func (_e *Collection_Expecter) CreateMany(ctx interface{}, docs interface{}) *Collection_CreateMany_Call { + return &Collection_CreateMany_Call{Call: _e.mock.On("CreateMany", ctx, docs)} } -func (_c *Collection_CreateMany_Call) Run(run func(_a0 context.Context, _a1 []*client.Document)) *Collection_CreateMany_Call { +func (_c *Collection_CreateMany_Call) Run(run func(ctx context.Context, docs []*client.Document)) *Collection_CreateMany_Call { _c.Call.Run(func(args mock.Arguments) { run(args[0].(context.Context), args[1].([]*client.Document)) }) @@ -207,23 +248,23 @@ func (_c *Collection_Definition_Call) RunAndReturn(run func() client.CollectionD return _c } -// Delete provides a mock function with given fields: _a0, _a1 -func (_m *Collection) Delete(_a0 context.Context, _a1 client.DocID) (bool, error) { - ret := _m.Called(_a0, _a1) +// Delete provides a mock function with given fields: ctx, docID +func (_m *Collection) Delete(ctx context.Context, docID client.DocID) (bool, error) { + ret := _m.Called(ctx, docID) var r0 bool var r1 error if rf, ok := ret.Get(0).(func(context.Context, client.DocID) (bool, error)); ok { - return rf(_a0, _a1) + return rf(ctx, docID) } if rf, ok := ret.Get(0).(func(context.Context, client.DocID) bool); ok { - r0 = rf(_a0, _a1) + r0 = rf(ctx, docID) } else { r0 = ret.Get(0).(bool) } if rf, ok := ret.Get(1).(func(context.Context, client.DocID) error); ok { - r1 = rf(_a0, _a1) + r1 = rf(ctx, docID) } else { r1 = ret.Error(1) } @@ -237,13 +278,13 @@ type Collection_Delete_Call struct { } // Delete is a helper method to define mock.On call -// - _a0 context.Context -// - _a1 client.DocID -func (_e *Collection_Expecter) Delete(_a0 interface{}, _a1 interface{}) *Collection_Delete_Call { - return &Collection_Delete_Call{Call: _e.mock.On("Delete", _a0, _a1)} +// - ctx context.Context +// - docID client.DocID +func (_e *Collection_Expecter) Delete(ctx interface{}, docID interface{}) *Collection_Delete_Call { + return &Collection_Delete_Call{Call: _e.mock.On("Delete", ctx, docID)} } -func (_c *Collection_Delete_Call) Run(run func(_a0 context.Context, _a1 client.DocID)) *Collection_Delete_Call { +func (_c *Collection_Delete_Call) Run(run func(ctx context.Context, docID client.DocID)) *Collection_Delete_Call { _c.Call.Run(func(args mock.Arguments) { run(args[0].(context.Context), 
args[1].(client.DocID)) }) @@ -260,167 +301,45 @@ func (_c *Collection_Delete_Call) RunAndReturn(run func(context.Context, client. return _c } -// DeleteWith provides a mock function with given fields: ctx, target -func (_m *Collection) DeleteWith(ctx context.Context, target interface{}) (*client.DeleteResult, error) { - ret := _m.Called(ctx, target) - - var r0 *client.DeleteResult - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, interface{}) (*client.DeleteResult, error)); ok { - return rf(ctx, target) - } - if rf, ok := ret.Get(0).(func(context.Context, interface{}) *client.DeleteResult); ok { - r0 = rf(ctx, target) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*client.DeleteResult) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, interface{}) error); ok { - r1 = rf(ctx, target) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Collection_DeleteWith_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteWith' -type Collection_DeleteWith_Call struct { - *mock.Call -} - -// DeleteWith is a helper method to define mock.On call -// - ctx context.Context -// - target interface{} -func (_e *Collection_Expecter) DeleteWith(ctx interface{}, target interface{}) *Collection_DeleteWith_Call { - return &Collection_DeleteWith_Call{Call: _e.mock.On("DeleteWith", ctx, target)} -} - -func (_c *Collection_DeleteWith_Call) Run(run func(ctx context.Context, target interface{})) *Collection_DeleteWith_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(interface{})) - }) - return _c -} - -func (_c *Collection_DeleteWith_Call) Return(_a0 *client.DeleteResult, _a1 error) *Collection_DeleteWith_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Collection_DeleteWith_Call) RunAndReturn(run func(context.Context, interface{}) (*client.DeleteResult, error)) *Collection_DeleteWith_Call { - _c.Call.Return(run) - return _c -} - -// DeleteWithDocID provides a mock function with given fields: _a0, _a1 -func (_m *Collection) DeleteWithDocID(_a0 context.Context, _a1 client.DocID) (*client.DeleteResult, error) { +// DeleteDocIndex provides a mock function with given fields: _a0, _a1 +func (_m *Collection) DeleteDocIndex(_a0 context.Context, _a1 *client.Document) error { ret := _m.Called(_a0, _a1) - var r0 *client.DeleteResult - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, client.DocID) (*client.DeleteResult, error)); ok { - return rf(_a0, _a1) - } - if rf, ok := ret.Get(0).(func(context.Context, client.DocID) *client.DeleteResult); ok { - r0 = rf(_a0, _a1) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*client.DeleteResult) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, client.DocID) error); ok { - r1 = rf(_a0, _a1) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Collection_DeleteWithDocID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteWithDocID' -type Collection_DeleteWithDocID_Call struct { - *mock.Call -} - -// DeleteWithDocID is a helper method to define mock.On call -// - _a0 context.Context -// - _a1 client.DocID -func (_e *Collection_Expecter) DeleteWithDocID(_a0 interface{}, _a1 interface{}) *Collection_DeleteWithDocID_Call { - return &Collection_DeleteWithDocID_Call{Call: _e.mock.On("DeleteWithDocID", _a0, _a1)} -} - -func (_c *Collection_DeleteWithDocID_Call) Run(run func(_a0 context.Context, _a1 client.DocID)) *Collection_DeleteWithDocID_Call { - _c.Call.Run(func(args 
mock.Arguments) { - run(args[0].(context.Context), args[1].(client.DocID)) - }) - return _c -} - -func (_c *Collection_DeleteWithDocID_Call) Return(_a0 *client.DeleteResult, _a1 error) *Collection_DeleteWithDocID_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Collection_DeleteWithDocID_Call) RunAndReturn(run func(context.Context, client.DocID) (*client.DeleteResult, error)) *Collection_DeleteWithDocID_Call { - _c.Call.Return(run) - return _c -} - -// DeleteWithDocIDs provides a mock function with given fields: _a0, _a1 -func (_m *Collection) DeleteWithDocIDs(_a0 context.Context, _a1 []client.DocID) (*client.DeleteResult, error) { - ret := _m.Called(_a0, _a1) - - var r0 *client.DeleteResult - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, []client.DocID) (*client.DeleteResult, error)); ok { - return rf(_a0, _a1) - } - if rf, ok := ret.Get(0).(func(context.Context, []client.DocID) *client.DeleteResult); ok { + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *client.Document) error); ok { r0 = rf(_a0, _a1) } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*client.DeleteResult) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, []client.DocID) error); ok { - r1 = rf(_a0, _a1) - } else { - r1 = ret.Error(1) + r0 = ret.Error(0) } - return r0, r1 + return r0 } -// Collection_DeleteWithDocIDs_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteWithDocIDs' -type Collection_DeleteWithDocIDs_Call struct { +// Collection_DeleteDocIndex_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteDocIndex' +type Collection_DeleteDocIndex_Call struct { *mock.Call } -// DeleteWithDocIDs is a helper method to define mock.On call +// DeleteDocIndex is a helper method to define mock.On call // - _a0 context.Context -// - _a1 []client.DocID -func (_e *Collection_Expecter) DeleteWithDocIDs(_a0 interface{}, _a1 interface{}) *Collection_DeleteWithDocIDs_Call { - return &Collection_DeleteWithDocIDs_Call{Call: _e.mock.On("DeleteWithDocIDs", _a0, _a1)} +// - _a1 *client.Document +func (_e *Collection_Expecter) DeleteDocIndex(_a0 interface{}, _a1 interface{}) *Collection_DeleteDocIndex_Call { + return &Collection_DeleteDocIndex_Call{Call: _e.mock.On("DeleteDocIndex", _a0, _a1)} } -func (_c *Collection_DeleteWithDocIDs_Call) Run(run func(_a0 context.Context, _a1 []client.DocID)) *Collection_DeleteWithDocIDs_Call { +func (_c *Collection_DeleteDocIndex_Call) Run(run func(_a0 context.Context, _a1 *client.Document)) *Collection_DeleteDocIndex_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].([]client.DocID)) + run(args[0].(context.Context), args[1].(*client.Document)) }) return _c } -func (_c *Collection_DeleteWithDocIDs_Call) Return(_a0 *client.DeleteResult, _a1 error) *Collection_DeleteWithDocIDs_Call { - _c.Call.Return(_a0, _a1) +func (_c *Collection_DeleteDocIndex_Call) Return(_a0 error) *Collection_DeleteDocIndex_Call { + _c.Call.Return(_a0) return _c } -func (_c *Collection_DeleteWithDocIDs_Call) RunAndReturn(run func(context.Context, []client.DocID) (*client.DeleteResult, error)) *Collection_DeleteWithDocIDs_Call { +func (_c *Collection_DeleteDocIndex_Call) RunAndReturn(run func(context.Context, *client.Document) error) *Collection_DeleteDocIndex_Call { _c.Call.Return(run) return _c } @@ -564,23 +483,23 @@ func (_c *Collection_DropIndex_Call) RunAndReturn(run func(context.Context, stri return _c } -// Exists provides a mock function with 
given fields: _a0, _a1 -func (_m *Collection) Exists(_a0 context.Context, _a1 client.DocID) (bool, error) { - ret := _m.Called(_a0, _a1) +// Exists provides a mock function with given fields: ctx, docID +func (_m *Collection) Exists(ctx context.Context, docID client.DocID) (bool, error) { + ret := _m.Called(ctx, docID) var r0 bool var r1 error if rf, ok := ret.Get(0).(func(context.Context, client.DocID) (bool, error)); ok { - return rf(_a0, _a1) + return rf(ctx, docID) } if rf, ok := ret.Get(0).(func(context.Context, client.DocID) bool); ok { - r0 = rf(_a0, _a1) + r0 = rf(ctx, docID) } else { r0 = ret.Get(0).(bool) } if rf, ok := ret.Get(1).(func(context.Context, client.DocID) error); ok { - r1 = rf(_a0, _a1) + r1 = rf(ctx, docID) } else { r1 = ret.Error(1) } @@ -594,13 +513,13 @@ type Collection_Exists_Call struct { } // Exists is a helper method to define mock.On call -// - _a0 context.Context -// - _a1 client.DocID -func (_e *Collection_Expecter) Exists(_a0 interface{}, _a1 interface{}) *Collection_Exists_Call { - return &Collection_Exists_Call{Call: _e.mock.On("Exists", _a0, _a1)} +// - ctx context.Context +// - docID client.DocID +func (_e *Collection_Expecter) Exists(ctx interface{}, docID interface{}) *Collection_Exists_Call { + return &Collection_Exists_Call{Call: _e.mock.On("Exists", ctx, docID)} } -func (_c *Collection_Exists_Call) Run(run func(_a0 context.Context, _a1 client.DocID)) *Collection_Exists_Call { +func (_c *Collection_Exists_Call) Run(run func(ctx context.Context, docID client.DocID)) *Collection_Exists_Call { _c.Call.Run(func(args mock.Arguments) { run(args[0].(context.Context), args[1].(client.DocID)) }) @@ -863,13 +782,13 @@ func (_c *Collection_Name_Call) RunAndReturn(run func() immutable.Option[string] return _c } -// Save provides a mock function with given fields: _a0, _a1 -func (_m *Collection) Save(_a0 context.Context, _a1 *client.Document) error { - ret := _m.Called(_a0, _a1) +// Save provides a mock function with given fields: ctx, doc +func (_m *Collection) Save(ctx context.Context, doc *client.Document) error { + ret := _m.Called(ctx, doc) var r0 error if rf, ok := ret.Get(0).(func(context.Context, *client.Document) error); ok { - r0 = rf(_a0, _a1) + r0 = rf(ctx, doc) } else { r0 = ret.Error(0) } @@ -883,13 +802,13 @@ type Collection_Save_Call struct { } // Save is a helper method to define mock.On call -// - _a0 context.Context -// - _a1 *client.Document -func (_e *Collection_Expecter) Save(_a0 interface{}, _a1 interface{}) *Collection_Save_Call { - return &Collection_Save_Call{Call: _e.mock.On("Save", _a0, _a1)} +// - ctx context.Context +// - doc *client.Document +func (_e *Collection_Expecter) Save(ctx interface{}, doc interface{}) *Collection_Save_Call { + return &Collection_Save_Call{Call: _e.mock.On("Save", ctx, doc)} } -func (_c *Collection_Save_Call) Run(run func(_a0 context.Context, _a1 *client.Document)) *Collection_Save_Call { +func (_c *Collection_Save_Call) Run(run func(ctx context.Context, doc *client.Document)) *Collection_Save_Call { _c.Call.Run(func(args mock.Arguments) { run(args[0].(context.Context), args[1].(*client.Document)) }) @@ -988,13 +907,13 @@ func (_c *Collection_SchemaRoot_Call) RunAndReturn(run func() string) *Collectio return _c } -// Update provides a mock function with given fields: _a0, _a1 -func (_m *Collection) Update(_a0 context.Context, _a1 *client.Document) error { - ret := _m.Called(_a0, _a1) +// Update provides a mock function with given fields: ctx, docs +func (_m *Collection) Update(ctx context.Context, docs 
*client.Document) error { + ret := _m.Called(ctx, docs) var r0 error if rf, ok := ret.Get(0).(func(context.Context, *client.Document) error); ok { - r0 = rf(_a0, _a1) + r0 = rf(ctx, docs) } else { r0 = ret.Error(0) } @@ -1008,13 +927,13 @@ type Collection_Update_Call struct { } // Update is a helper method to define mock.On call -// - _a0 context.Context -// - _a1 *client.Document -func (_e *Collection_Expecter) Update(_a0 interface{}, _a1 interface{}) *Collection_Update_Call { - return &Collection_Update_Call{Call: _e.mock.On("Update", _a0, _a1)} +// - ctx context.Context +// - docs *client.Document +func (_e *Collection_Expecter) Update(ctx interface{}, docs interface{}) *Collection_Update_Call { + return &Collection_Update_Call{Call: _e.mock.On("Update", ctx, docs)} } -func (_c *Collection_Update_Call) Run(run func(_a0 context.Context, _a1 *client.Document)) *Collection_Update_Call { +func (_c *Collection_Update_Call) Run(run func(ctx context.Context, docs *client.Document)) *Collection_Update_Call { _c.Call.Run(func(args mock.Arguments) { run(args[0].(context.Context), args[1].(*client.Document)) }) @@ -1031,170 +950,46 @@ func (_c *Collection_Update_Call) RunAndReturn(run func(context.Context, *client return _c } -// UpdateWith provides a mock function with given fields: ctx, target, updater -func (_m *Collection) UpdateWith(ctx context.Context, target interface{}, updater string) (*client.UpdateResult, error) { - ret := _m.Called(ctx, target, updater) - - var r0 *client.UpdateResult - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, interface{}, string) (*client.UpdateResult, error)); ok { - return rf(ctx, target, updater) - } - if rf, ok := ret.Get(0).(func(context.Context, interface{}, string) *client.UpdateResult); ok { - r0 = rf(ctx, target, updater) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*client.UpdateResult) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, interface{}, string) error); ok { - r1 = rf(ctx, target, updater) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Collection_UpdateWith_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateWith' -type Collection_UpdateWith_Call struct { - *mock.Call -} - -// UpdateWith is a helper method to define mock.On call -// - ctx context.Context -// - target interface{} -// - updater string -func (_e *Collection_Expecter) UpdateWith(ctx interface{}, target interface{}, updater interface{}) *Collection_UpdateWith_Call { - return &Collection_UpdateWith_Call{Call: _e.mock.On("UpdateWith", ctx, target, updater)} -} - -func (_c *Collection_UpdateWith_Call) Run(run func(ctx context.Context, target interface{}, updater string)) *Collection_UpdateWith_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(interface{}), args[2].(string)) - }) - return _c -} - -func (_c *Collection_UpdateWith_Call) Return(_a0 *client.UpdateResult, _a1 error) *Collection_UpdateWith_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Collection_UpdateWith_Call) RunAndReturn(run func(context.Context, interface{}, string) (*client.UpdateResult, error)) *Collection_UpdateWith_Call { - _c.Call.Return(run) - return _c -} - -// UpdateWithDocID provides a mock function with given fields: ctx, docID, updater -func (_m *Collection) UpdateWithDocID(ctx context.Context, docID client.DocID, updater string) (*client.UpdateResult, error) { - ret := _m.Called(ctx, docID, updater) - - var r0 *client.UpdateResult - var r1 error - if rf, ok 
:= ret.Get(0).(func(context.Context, client.DocID, string) (*client.UpdateResult, error)); ok { - return rf(ctx, docID, updater) - } - if rf, ok := ret.Get(0).(func(context.Context, client.DocID, string) *client.UpdateResult); ok { - r0 = rf(ctx, docID, updater) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*client.UpdateResult) - } - } +// UpdateDocIndex provides a mock function with given fields: ctx, oldDoc, newDoc +func (_m *Collection) UpdateDocIndex(ctx context.Context, oldDoc *client.Document, newDoc *client.Document) error { + ret := _m.Called(ctx, oldDoc, newDoc) - if rf, ok := ret.Get(1).(func(context.Context, client.DocID, string) error); ok { - r1 = rf(ctx, docID, updater) + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *client.Document, *client.Document) error); ok { + r0 = rf(ctx, oldDoc, newDoc) } else { - r1 = ret.Error(1) + r0 = ret.Error(0) } - return r0, r1 + return r0 } -// Collection_UpdateWithDocID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateWithDocID' -type Collection_UpdateWithDocID_Call struct { +// Collection_UpdateDocIndex_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateDocIndex' +type Collection_UpdateDocIndex_Call struct { *mock.Call } -// UpdateWithDocID is a helper method to define mock.On call +// UpdateDocIndex is a helper method to define mock.On call // - ctx context.Context -// - docID client.DocID -// - updater string -func (_e *Collection_Expecter) UpdateWithDocID(ctx interface{}, docID interface{}, updater interface{}) *Collection_UpdateWithDocID_Call { - return &Collection_UpdateWithDocID_Call{Call: _e.mock.On("UpdateWithDocID", ctx, docID, updater)} +// - oldDoc *client.Document +// - newDoc *client.Document +func (_e *Collection_Expecter) UpdateDocIndex(ctx interface{}, oldDoc interface{}, newDoc interface{}) *Collection_UpdateDocIndex_Call { + return &Collection_UpdateDocIndex_Call{Call: _e.mock.On("UpdateDocIndex", ctx, oldDoc, newDoc)} } -func (_c *Collection_UpdateWithDocID_Call) Run(run func(ctx context.Context, docID client.DocID, updater string)) *Collection_UpdateWithDocID_Call { +func (_c *Collection_UpdateDocIndex_Call) Run(run func(ctx context.Context, oldDoc *client.Document, newDoc *client.Document)) *Collection_UpdateDocIndex_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(client.DocID), args[2].(string)) + run(args[0].(context.Context), args[1].(*client.Document), args[2].(*client.Document)) }) return _c } -func (_c *Collection_UpdateWithDocID_Call) Return(_a0 *client.UpdateResult, _a1 error) *Collection_UpdateWithDocID_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Collection_UpdateWithDocID_Call) RunAndReturn(run func(context.Context, client.DocID, string) (*client.UpdateResult, error)) *Collection_UpdateWithDocID_Call { - _c.Call.Return(run) - return _c -} - -// UpdateWithDocIDs provides a mock function with given fields: _a0, _a1, _a2 -func (_m *Collection) UpdateWithDocIDs(_a0 context.Context, _a1 []client.DocID, _a2 string) (*client.UpdateResult, error) { - ret := _m.Called(_a0, _a1, _a2) - - var r0 *client.UpdateResult - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, []client.DocID, string) (*client.UpdateResult, error)); ok { - return rf(_a0, _a1, _a2) - } - if rf, ok := ret.Get(0).(func(context.Context, []client.DocID, string) *client.UpdateResult); ok { - r0 = rf(_a0, _a1, _a2) - } else { - if ret.Get(0) != nil { - r0 = 
ret.Get(0).(*client.UpdateResult) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, []client.DocID, string) error); ok { - r1 = rf(_a0, _a1, _a2) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Collection_UpdateWithDocIDs_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateWithDocIDs' -type Collection_UpdateWithDocIDs_Call struct { - *mock.Call -} - -// UpdateWithDocIDs is a helper method to define mock.On call -// - _a0 context.Context -// - _a1 []client.DocID -// - _a2 string -func (_e *Collection_Expecter) UpdateWithDocIDs(_a0 interface{}, _a1 interface{}, _a2 interface{}) *Collection_UpdateWithDocIDs_Call { - return &Collection_UpdateWithDocIDs_Call{Call: _e.mock.On("UpdateWithDocIDs", _a0, _a1, _a2)} -} - -func (_c *Collection_UpdateWithDocIDs_Call) Run(run func(_a0 context.Context, _a1 []client.DocID, _a2 string)) *Collection_UpdateWithDocIDs_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].([]client.DocID), args[2].(string)) - }) - return _c -} - -func (_c *Collection_UpdateWithDocIDs_Call) Return(_a0 *client.UpdateResult, _a1 error) *Collection_UpdateWithDocIDs_Call { - _c.Call.Return(_a0, _a1) +func (_c *Collection_UpdateDocIndex_Call) Return(_a0 error) *Collection_UpdateDocIndex_Call { + _c.Call.Return(_a0) return _c } -func (_c *Collection_UpdateWithDocIDs_Call) RunAndReturn(run func(context.Context, []client.DocID, string) (*client.UpdateResult, error)) *Collection_UpdateWithDocIDs_Call { +func (_c *Collection_UpdateDocIndex_Call) RunAndReturn(run func(context.Context, *client.Document, *client.Document) error) *Collection_UpdateDocIndex_Call { _c.Call.Return(run) return _c } @@ -1255,50 +1050,6 @@ func (_c *Collection_UpdateWithFilter_Call) RunAndReturn(run func(context.Contex return _c } -// WithTxn provides a mock function with given fields: _a0 -func (_m *Collection) WithTxn(_a0 datastore.Txn) client.Collection { - ret := _m.Called(_a0) - - var r0 client.Collection - if rf, ok := ret.Get(0).(func(datastore.Txn) client.Collection); ok { - r0 = rf(_a0) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(client.Collection) - } - } - - return r0 -} - -// Collection_WithTxn_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'WithTxn' -type Collection_WithTxn_Call struct { - *mock.Call -} - -// WithTxn is a helper method to define mock.On call -// - _a0 datastore.Txn -func (_e *Collection_Expecter) WithTxn(_a0 interface{}) *Collection_WithTxn_Call { - return &Collection_WithTxn_Call{Call: _e.mock.On("WithTxn", _a0)} -} - -func (_c *Collection_WithTxn_Call) Run(run func(_a0 datastore.Txn)) *Collection_WithTxn_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(datastore.Txn)) - }) - return _c -} - -func (_c *Collection_WithTxn_Call) Return(_a0 client.Collection) *Collection_WithTxn_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Collection_WithTxn_Call) RunAndReturn(run func(datastore.Txn) client.Collection) *Collection_WithTxn_Call { - _c.Call.Return(run) - return _c -} - // NewCollection creates a new instance of Collection. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. 
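//
// A short usage sketch (illustrative only; assumes testify's mock matchers and
// the generated Return helper for the Exists call):
//
//	col := NewCollection(t)
//	col.EXPECT().Exists(mock.Anything, mock.Anything).Return(true, nil)
//	exists, err := col.Exists(context.Background(), client.DocID{})
//	// exists == true and err == nil; expectations are asserted on test cleanup.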
func NewCollection(t interface { diff --git a/client/mocks/db.go b/client/mocks/db.go index aeb54ea4cd..20b5988fe7 100644 --- a/client/mocks/db.go +++ b/client/mocks/db.go @@ -32,6 +32,59 @@ func (_m *DB) EXPECT() *DB_Expecter { return &DB_Expecter{mock: &_m.Mock} } +// AddPolicy provides a mock function with given fields: ctx, policy +func (_m *DB) AddPolicy(ctx context.Context, policy string) (client.AddPolicyResult, error) { + ret := _m.Called(ctx, policy) + + var r0 client.AddPolicyResult + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string) (client.AddPolicyResult, error)); ok { + return rf(ctx, policy) + } + if rf, ok := ret.Get(0).(func(context.Context, string) client.AddPolicyResult); ok { + r0 = rf(ctx, policy) + } else { + r0 = ret.Get(0).(client.AddPolicyResult) + } + + if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(ctx, policy) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DB_AddPolicy_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AddPolicy' +type DB_AddPolicy_Call struct { + *mock.Call +} + +// AddPolicy is a helper method to define mock.On call +// - ctx context.Context +// - policy string +func (_e *DB_Expecter) AddPolicy(ctx interface{}, policy interface{}) *DB_AddPolicy_Call { + return &DB_AddPolicy_Call{Call: _e.mock.On("AddPolicy", ctx, policy)} +} + +func (_c *DB_AddPolicy_Call) Run(run func(ctx context.Context, policy string)) *DB_AddPolicy_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string)) + }) + return _c +} + +func (_c *DB_AddPolicy_Call) Return(_a0 client.AddPolicyResult, _a1 error) *DB_AddPolicy_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *DB_AddPolicy_Call) RunAndReturn(run func(context.Context, string) (client.AddPolicyResult, error)) *DB_AddPolicy_Call { + _c.Call.Return(run) + return _c +} + // AddSchema provides a mock function with given fields: _a0, _a1 func (_m *DB) AddSchema(_a0 context.Context, _a1 string) ([]client.CollectionDescription, error) { ret := _m.Called(_a0, _a1) @@ -346,13 +399,13 @@ func (_c *DB_Events_Call) RunAndReturn(run func() events.Events) *DB_Events_Call return _c } -// ExecRequest provides a mock function with given fields: _a0, _a1 -func (_m *DB) ExecRequest(_a0 context.Context, _a1 string) *client.RequestResult { - ret := _m.Called(_a0, _a1) +// ExecRequest provides a mock function with given fields: ctx, request +func (_m *DB) ExecRequest(ctx context.Context, request string) *client.RequestResult { + ret := _m.Called(ctx, request) var r0 *client.RequestResult if rf, ok := ret.Get(0).(func(context.Context, string) *client.RequestResult); ok { - r0 = rf(_a0, _a1) + r0 = rf(ctx, request) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.RequestResult) @@ -368,13 +421,13 @@ type DB_ExecRequest_Call struct { } // ExecRequest is a helper method to define mock.On call -// - _a0 context.Context -// - _a1 string -func (_e *DB_Expecter) ExecRequest(_a0 interface{}, _a1 interface{}) *DB_ExecRequest_Call { - return &DB_ExecRequest_Call{Call: _e.mock.On("ExecRequest", _a0, _a1)} +// - ctx context.Context +// - request string +func (_e *DB_Expecter) ExecRequest(ctx interface{}, request interface{}) *DB_ExecRequest_Call { + return &DB_ExecRequest_Call{Call: _e.mock.On("ExecRequest", ctx, request)} } -func (_c *DB_ExecRequest_Call) Run(run func(_a0 context.Context, _a1 string)) *DB_ExecRequest_Call { +func (_c *DB_ExecRequest_Call) Run(run func(ctx context.Context, 
request string)) *DB_ExecRequest_Call { _c.Call.Run(func(args mock.Arguments) { run(args[0].(context.Context), args[1].(string)) }) @@ -857,6 +910,49 @@ func (_c *DB_NewTxn_Call) RunAndReturn(run func(context.Context, bool) (datastor return _c } +// PatchCollection provides a mock function with given fields: _a0, _a1 +func (_m *DB) PatchCollection(_a0 context.Context, _a1 string) error { + ret := _m.Called(_a0, _a1) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { + r0 = rf(_a0, _a1) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// DB_PatchCollection_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'PatchCollection' +type DB_PatchCollection_Call struct { + *mock.Call +} + +// PatchCollection is a helper method to define mock.On call +// - _a0 context.Context +// - _a1 string +func (_e *DB_Expecter) PatchCollection(_a0 interface{}, _a1 interface{}) *DB_PatchCollection_Call { + return &DB_PatchCollection_Call{Call: _e.mock.On("PatchCollection", _a0, _a1)} +} + +func (_c *DB_PatchCollection_Call) Run(run func(_a0 context.Context, _a1 string)) *DB_PatchCollection_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string)) + }) + return _c +} + +func (_c *DB_PatchCollection_Call) Return(_a0 error) *DB_PatchCollection_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *DB_PatchCollection_Call) RunAndReturn(run func(context.Context, string) error) *DB_PatchCollection_Call { + _c.Call.Return(run) + return _c +} + // PatchSchema provides a mock function with given fields: _a0, _a1, _a2, _a3 func (_m *DB) PatchSchema(_a0 context.Context, _a1 string, _a2 immutable.Option[model.Lens], _a3 bool) error { ret := _m.Called(_a0, _a1, _a2, _a3) @@ -1116,50 +1212,6 @@ func (_c *DB_SetMigration_Call) RunAndReturn(run func(context.Context, client.Le return _c } -// WithTxn provides a mock function with given fields: _a0 -func (_m *DB) WithTxn(_a0 datastore.Txn) client.Store { - ret := _m.Called(_a0) - - var r0 client.Store - if rf, ok := ret.Get(0).(func(datastore.Txn) client.Store); ok { - r0 = rf(_a0) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(client.Store) - } - } - - return r0 -} - -// DB_WithTxn_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'WithTxn' -type DB_WithTxn_Call struct { - *mock.Call -} - -// WithTxn is a helper method to define mock.On call -// - _a0 datastore.Txn -func (_e *DB_Expecter) WithTxn(_a0 interface{}) *DB_WithTxn_Call { - return &DB_WithTxn_Call{Call: _e.mock.On("WithTxn", _a0)} -} - -func (_c *DB_WithTxn_Call) Run(run func(_a0 datastore.Txn)) *DB_WithTxn_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(datastore.Txn)) - }) - return _c -} - -func (_c *DB_WithTxn_Call) Return(_a0 client.Store) *DB_WithTxn_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *DB_WithTxn_Call) RunAndReturn(run func(datastore.Txn) client.Store) *DB_WithTxn_Call { - _c.Call.Return(run) - return _c -} - // NewDB creates a new instance of DB. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. 
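//
// A short usage sketch (illustrative values; assumes testify's mock matchers
// and the generated AddPolicy Return helper shown above):
//
//	db := NewDB(t)
//	db.EXPECT().AddPolicy(mock.Anything, mock.Anything).
//		Return(client.AddPolicyResult{}, nil)
//	res, err := db.AddPolicy(context.Background(), "<policy>")
//	// err == nil and res is the stubbed client.AddPolicyResult.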
func NewDB(t interface { diff --git a/client/normal_array.go b/client/normal_array.go new file mode 100644 index 0000000000..00133a0f74 --- /dev/null +++ b/client/normal_array.go @@ -0,0 +1,149 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package client + +import ( + "time" + + "golang.org/x/exp/constraints" +) + +type baseArrayNormalValue[T any] struct { + NormalVoid + val T +} + +func (v baseArrayNormalValue[T]) Unwrap() any { + return v.val +} + +func (v baseArrayNormalValue[T]) IsArray() bool { + return true +} + +func newBaseArrayNormalValue[T any](val T) baseArrayNormalValue[T] { + return baseArrayNormalValue[T]{val: val} +} + +type normalBoolArray struct { + baseArrayNormalValue[[]bool] +} + +func (v normalBoolArray) BoolArray() ([]bool, bool) { + return v.val, true +} + +type normalIntArray struct { + baseArrayNormalValue[[]int64] +} + +func (v normalIntArray) IntArray() ([]int64, bool) { + return v.val, true +} + +type normalFloatArray struct { + baseArrayNormalValue[[]float64] +} + +func (v normalFloatArray) FloatArray() ([]float64, bool) { + return v.val, true +} + +type normalStringArray struct { + baseArrayNormalValue[[]string] +} + +func (v normalStringArray) StringArray() ([]string, bool) { + return v.val, true +} + +type normalBytesArray struct { + baseArrayNormalValue[[][]byte] +} + +func (v normalBytesArray) BytesArray() ([][]byte, bool) { + return v.val, true +} + +type normalTimeArray struct { + baseArrayNormalValue[[]time.Time] +} + +func (v normalTimeArray) TimeArray() ([]time.Time, bool) { + return v.val, true +} + +type normalDocumentArray struct { + baseArrayNormalValue[[]*Document] +} + +func (v normalDocumentArray) DocumentArray() ([]*Document, bool) { + return v.val, true +} + +// NewNormalBoolArray creates a new NormalValue that represents a `[]bool` value. +func NewNormalBoolArray(val []bool) NormalValue { + return normalBoolArray{newBaseArrayNormalValue(val)} +} + +// NewNormalIntArray creates a new NormalValue that represents a `[]int64` value. +func NewNormalIntArray[T constraints.Integer | constraints.Float](val []T) NormalValue { + return normalIntArray{newBaseArrayNormalValue(normalizeNumArr[int64](val))} +} + +// NewNormalFloatArray creates a new NormalValue that represents a `[]float64` value. +func NewNormalFloatArray[T constraints.Integer | constraints.Float](val []T) NormalValue { + return normalFloatArray{newBaseArrayNormalValue(normalizeNumArr[float64](val))} +} + +// NewNormalStringArray creates a new NormalValue that represents a `[]string` value. +func NewNormalStringArray[T string | []byte](val []T) NormalValue { + return normalStringArray{newBaseArrayNormalValue(normalizeCharsArr[string](val))} +} + +// NewNormalBytesArray creates a new NormalValue that represents a `[][]byte` value. +func NewNormalBytesArray[T string | []byte](val []T) NormalValue { + return normalBytesArray{newBaseArrayNormalValue(normalizeCharsArr[[]byte](val))} +} + +// NewNormalTimeArray creates a new NormalValue that represents a `[]time.Time` value. 
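+//
+// A sketch of typical use (assumes the returned NormalValue exposes the
+// matching TimeArray accessor defined above):
+//
+//	v := NewNormalTimeArray([]time.Time{time.Unix(0, 0)})
+//	times, ok := v.TimeArray() // ok == true, len(times) == 1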
+func NewNormalTimeArray(val []time.Time) NormalValue {
+	return normalTimeArray{newBaseArrayNormalValue(val)}
+}
+
+// NewNormalDocumentArray creates a new NormalValue that represents a `[]*Document` value.
+func NewNormalDocumentArray(val []*Document) NormalValue {
+	return normalDocumentArray{newBaseArrayNormalValue(val)}
+}
+
+func normalizeNumArr[R int64 | float64, T constraints.Integer | constraints.Float](val []T) []R {
+	var v any = val
+	if arr, ok := v.([]R); ok {
+		return arr
+	}
+	arr := make([]R, len(val))
+	for i, v := range val {
+		arr[i] = R(v)
+	}
+	return arr
+}
+
+func normalizeCharsArr[R string | []byte, T string | []byte](val []T) []R {
+	var v any = val
+	if arr, ok := v.([]R); ok {
+		return arr
+	}
+	arr := make([]R, len(val))
+	for i, v := range val {
+		arr[i] = R(v)
+	}
+	return arr
+}
diff --git a/client/normal_array_of_nillables.go b/client/normal_array_of_nillables.go
new file mode 100644
index 0000000000..53461f6afa
--- /dev/null
+++ b/client/normal_array_of_nillables.go
@@ -0,0 +1,142 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package client
+
+import (
+	"time"
+
+	"github.com/sourcenetwork/immutable"
+	"golang.org/x/exp/constraints"
+)
+
+type normalNillableBoolArray struct {
+	baseArrayNormalValue[[]immutable.Option[bool]]
+}
+
+func (v normalNillableBoolArray) NillableBoolArray() ([]immutable.Option[bool], bool) {
+	return v.val, true
+}
+
+type normalNillableIntArray struct {
+	baseArrayNormalValue[[]immutable.Option[int64]]
+}
+
+func (v normalNillableIntArray) NillableIntArray() ([]immutable.Option[int64], bool) {
+	return v.val, true
+}
+
+type normalNillableFloatArray struct {
+	baseArrayNormalValue[[]immutable.Option[float64]]
+}
+
+func (v normalNillableFloatArray) NillableFloatArray() ([]immutable.Option[float64], bool) {
+	return v.val, true
+}
+
+type normalNillableStringArray struct {
+	baseArrayNormalValue[[]immutable.Option[string]]
+}
+
+func (v normalNillableStringArray) NillableStringArray() ([]immutable.Option[string], bool) {
+	return v.val, true
+}
+
+type normalNillableBytesArray struct {
+	baseArrayNormalValue[[]immutable.Option[[]byte]]
+}
+
+func (v normalNillableBytesArray) NillableBytesArray() ([]immutable.Option[[]byte], bool) {
+	return v.val, true
+}
+
+type normalNillableTimeArray struct {
+	baseArrayNormalValue[[]immutable.Option[time.Time]]
+}
+
+func (v normalNillableTimeArray) NillableTimeArray() ([]immutable.Option[time.Time], bool) {
+	return v.val, true
+}
+
+type normalNillableDocumentArray struct {
+	baseArrayNormalValue[[]immutable.Option[*Document]]
+}
+
+func (v normalNillableDocumentArray) NillableDocumentArray() ([]immutable.Option[*Document], bool) {
+	return v.val, true
+}
+
+// NewNormalNillableBoolArray creates a new NormalValue that represents a
+// `[]immutable.Option[bool]` value.
+func NewNormalNillableBoolArray(val []immutable.Option[bool]) NormalValue {
+	return normalNillableBoolArray{newBaseArrayNormalValue(val)}
+}
+
+// NewNormalNillableIntArray creates a new NormalValue that represents a `[]immutable.Option[int64]` value.
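+//
+// A sketch of typical use (element values are normalized to int64 as described):
+//
+//	v := NewNormalNillableIntArray([]immutable.Option[int32]{
+//		immutable.Some[int32](1), immutable.None[int32](),
+//	})
+//	arr, _ := v.NillableIntArray() // []immutable.Option[int64]{Some(1), None}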
+func NewNormalNillableIntArray[T constraints.Integer | constraints.Float](val []immutable.Option[T]) NormalValue {
+	return normalNillableIntArray{newBaseArrayNormalValue(normalizeNillableNumArr[int64](val))}
+}
+
+// NewNormalNillableFloatArray creates a new NormalValue that represents a `[]immutable.Option[float64]` value.
+func NewNormalNillableFloatArray[T constraints.Integer | constraints.Float](
+	val []immutable.Option[T],
+) NormalValue {
+	return normalNillableFloatArray{newBaseArrayNormalValue(normalizeNillableNumArr[float64](val))}
+}
+
+// NewNormalNillableStringArray creates a new NormalValue that represents a `[]immutable.Option[string]` value.
+func NewNormalNillableStringArray[T string | []byte](val []immutable.Option[T]) NormalValue {
+	return normalNillableStringArray{newBaseArrayNormalValue(normalizeNillableCharsArr[string](val))}
+}
+
+// NewNormalNillableBytesArray creates a new NormalValue that represents a `[]immutable.Option[[]byte]` value.
+func NewNormalNillableBytesArray[T string | []byte](val []immutable.Option[T]) NormalValue {
+	return normalNillableBytesArray{newBaseArrayNormalValue(normalizeNillableCharsArr[[]byte](val))}
+}
+
+// NewNormalNillableTimeArray creates a new NormalValue that represents a `[]immutable.Option[time.Time]` value.
+func NewNormalNillableTimeArray(val []immutable.Option[time.Time]) NormalValue {
+	return normalNillableTimeArray{newBaseArrayNormalValue(val)}
+}
+
+// NewNormalNillableDocumentArray creates a new NormalValue that represents a `[]immutable.Option[*Document]` value.
+func NewNormalNillableDocumentArray(val []immutable.Option[*Document]) NormalValue {
+	return normalNillableDocumentArray{newBaseArrayNormalValue(val)}
+}
+
+func normalizeNillableNumArr[R int64 | float64, T constraints.Integer | constraints.Float](
+	val []immutable.Option[T],
+) []immutable.Option[R] {
+	var v any = val
+	if arr, ok := v.([]immutable.Option[R]); ok {
+		return arr
+	}
+	arr := make([]immutable.Option[R], len(val))
+	for i, v := range val {
+		arr[i] = normalizeNillableNum[R](v)
+	}
+	return arr
+}
+
+func normalizeNillableCharsArr[R string | []byte, T string | []byte](val []immutable.Option[T]) []immutable.Option[R] {
+	var v any = val
+	if arr, ok := v.([]immutable.Option[R]); ok {
+		return arr
+	}
+	arr := make([]immutable.Option[R], len(val))
+	for i, v := range val {
+		if v.HasValue() {
+			arr[i] = immutable.Some(R(v.Value()))
+		} else {
+			arr[i] = immutable.None[R]()
+		}
+	}
+	return arr
+}
diff --git a/client/normal_new.go b/client/normal_new.go
new file mode 100644
index 0000000000..55ac46ce73
--- /dev/null
+++ b/client/normal_new.go
@@ -0,0 +1,465 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package client
+
+import (
+	"time"
+
+	"github.com/sourcenetwork/immutable"
+)
+
+// NewNormalValue creates a new NormalValue from the given value.
+// It will normalize all known types that can be converted to normal ones.
+// For example, if the given type is `[]int32`, it will be converted to `[]int64`.
+// If the given value is of type `[]any`, it will go through every element and try to convert it
+// to the most common type and normalize it.
+// For examples, the following conversions will be made: +// - `[]any{int32(1), int64(2)}` -> `[]int64{1, 2}`. +// - `[]any{int32(1), int64(2), float32(1.5)}` -> `[]float64{1.0, 2.0, 1.5}`. +// - `[]any{int32(1), nil}` -> `[]immutable.Option[int64]{immutable.Some(1), immutable.None[int64]()}`. +// +// This function will not check if the given value is `nil`. To normalize a `nil` value use the +// `NewNormalNil` function. +func NewNormalValue(val any) (NormalValue, error) { + switch v := val.(type) { + case bool: + return NewNormalBool(v), nil + case int8: + return newNormalInt(int64(v)), nil + case int16: + return newNormalInt(int64(v)), nil + case int32: + return newNormalInt(int64(v)), nil + case int64: + return newNormalInt(v), nil + case int: + return newNormalInt(int64(v)), nil + case uint8: + return newNormalInt(int64(v)), nil + case uint16: + return newNormalInt(int64(v)), nil + case uint32: + return newNormalInt(int64(v)), nil + case uint64: + return newNormalInt(int64(v)), nil + case uint: + return newNormalInt(int64(v)), nil + case float32: + return newNormalFloat(float64(v)), nil + case float64: + return newNormalFloat(v), nil + case string: + return NewNormalString(v), nil + case []byte: + return NewNormalBytes(v), nil + case time.Time: + return NewNormalTime(v), nil + case *Document: + return NewNormalDocument(v), nil + + case immutable.Option[bool]: + return NewNormalNillableBool(v), nil + case immutable.Option[int8]: + return NewNormalNillableInt(v), nil + case immutable.Option[int16]: + return NewNormalNillableInt(v), nil + case immutable.Option[int32]: + return NewNormalNillableInt(v), nil + case immutable.Option[int64]: + return NewNormalNillableInt(v), nil + case immutable.Option[int]: + return NewNormalNillableInt(v), nil + case immutable.Option[uint8]: + return NewNormalNillableInt(v), nil + case immutable.Option[uint16]: + return NewNormalNillableInt(v), nil + case immutable.Option[uint32]: + return NewNormalNillableInt(v), nil + case immutable.Option[uint64]: + return NewNormalNillableInt(v), nil + case immutable.Option[uint]: + return NewNormalNillableInt(v), nil + case immutable.Option[float32]: + return NewNormalNillableFloat(v), nil + case immutable.Option[float64]: + return NewNormalNillableFloat(v), nil + case immutable.Option[string]: + return NewNormalNillableString(v), nil + case immutable.Option[[]byte]: + return NewNormalNillableBytes(v), nil + case immutable.Option[time.Time]: + return NewNormalNillableTime(v), nil + case immutable.Option[*Document]: + return NewNormalNillableDocument(v), nil + + case []bool: + return NewNormalBoolArray(v), nil + case []int8: + return NewNormalIntArray(v), nil + case []int16: + return NewNormalIntArray(v), nil + case []int32: + return NewNormalIntArray(v), nil + case []int64: + return NewNormalIntArray(v), nil + case []int: + return NewNormalIntArray(v), nil + case []uint16: + return NewNormalIntArray(v), nil + case []uint32: + return NewNormalIntArray(v), nil + case []uint64: + return NewNormalIntArray(v), nil + case []uint: + return NewNormalIntArray(v), nil + case []float32: + return NewNormalFloatArray(v), nil + case []float64: + return NewNormalFloatArray(v), nil + case []string: + return NewNormalStringArray(v), nil + case [][]byte: + return NewNormalBytesArray(v), nil + case []time.Time: + return NewNormalTimeArray(v), nil + case []*Document: + return NewNormalDocumentArray(v), nil + + case []immutable.Option[bool]: + return NewNormalNillableBoolArray(v), nil + case []immutable.Option[int8]: + return 
NewNormalNillableIntArray(v), nil + case []immutable.Option[int16]: + return NewNormalNillableIntArray(v), nil + case []immutable.Option[int32]: + return NewNormalNillableIntArray(v), nil + case []immutable.Option[int64]: + return NewNormalNillableIntArray(v), nil + case []immutable.Option[int]: + return NewNormalNillableIntArray(v), nil + case []immutable.Option[uint8]: + return NewNormalNillableIntArray(v), nil + case []immutable.Option[uint16]: + return NewNormalNillableIntArray(v), nil + case []immutable.Option[uint32]: + return NewNormalNillableIntArray(v), nil + case []immutable.Option[uint64]: + return NewNormalNillableIntArray(v), nil + case []immutable.Option[uint]: + return NewNormalNillableIntArray(v), nil + case []immutable.Option[float32]: + return NewNormalNillableFloatArray(v), nil + case []immutable.Option[float64]: + return NewNormalNillableFloatArray(v), nil + case []immutable.Option[string]: + return NewNormalNillableStringArray(v), nil + case []immutable.Option[[]byte]: + return NewNormalNillableBytesArray(v), nil + case []immutable.Option[time.Time]: + return NewNormalNillableTimeArray(v), nil + case []immutable.Option[*Document]: + return NewNormalNillableDocumentArray(v), nil + + case immutable.Option[[]bool]: + return NewNormalBoolNillableArray(v), nil + case immutable.Option[[]int8]: + return NewNormalIntNillableArray(v), nil + case immutable.Option[[]int16]: + return NewNormalIntNillableArray(v), nil + case immutable.Option[[]int32]: + return NewNormalIntNillableArray(v), nil + case immutable.Option[[]int64]: + return NewNormalIntNillableArray(v), nil + case immutable.Option[[]int]: + return NewNormalIntNillableArray(v), nil + case immutable.Option[[]uint16]: + return NewNormalIntNillableArray(v), nil + case immutable.Option[[]uint32]: + return NewNormalIntNillableArray(v), nil + case immutable.Option[[]uint64]: + return NewNormalIntNillableArray(v), nil + case immutable.Option[[]uint]: + return NewNormalIntNillableArray(v), nil + case immutable.Option[[]float32]: + return NewNormalFloatNillableArray(v), nil + case immutable.Option[[]float64]: + return NewNormalFloatNillableArray(v), nil + case immutable.Option[[]string]: + return NewNormalStringNillableArray(v), nil + case immutable.Option[[][]byte]: + return NewNormalBytesNillableArray(v), nil + case immutable.Option[[]time.Time]: + return NewNormalTimeNillableArray(v), nil + case immutable.Option[[]*Document]: + return NewNormalDocumentNillableArray(v), nil + + case immutable.Option[[]immutable.Option[bool]]: + return NewNormalNillableBoolNillableArray(v), nil + case immutable.Option[[]immutable.Option[int8]]: + return NewNormalNillableIntNillableArray(v), nil + case immutable.Option[[]immutable.Option[int16]]: + return NewNormalNillableIntNillableArray(v), nil + case immutable.Option[[]immutable.Option[int32]]: + return NewNormalNillableIntNillableArray(v), nil + case immutable.Option[[]immutable.Option[int64]]: + return NewNormalNillableIntNillableArray(v), nil + case immutable.Option[[]immutable.Option[int]]: + return NewNormalNillableIntNillableArray(v), nil + case immutable.Option[[]immutable.Option[uint8]]: + return NewNormalNillableIntNillableArray(v), nil + case immutable.Option[[]immutable.Option[uint16]]: + return NewNormalNillableIntNillableArray(v), nil + case immutable.Option[[]immutable.Option[uint32]]: + return NewNormalNillableIntNillableArray(v), nil + case immutable.Option[[]immutable.Option[uint64]]: + return NewNormalNillableIntNillableArray(v), nil + case 
immutable.Option[[]immutable.Option[uint]]: + return NewNormalNillableIntNillableArray(v), nil + case immutable.Option[[]immutable.Option[float32]]: + return NewNormalNillableFloatNillableArray(v), nil + case immutable.Option[[]immutable.Option[float64]]: + return NewNormalNillableFloatNillableArray(v), nil + case immutable.Option[[]immutable.Option[string]]: + return NewNormalNillableStringNillableArray(v), nil + case immutable.Option[[]immutable.Option[[]byte]]: + return NewNormalNillableBytesNillableArray(v), nil + case immutable.Option[[]immutable.Option[time.Time]]: + return NewNormalNillableTimeNillableArray(v), nil + case immutable.Option[[]immutable.Option[*Document]]: + return NewNormalNillableDocumentNillableArray(v), nil + + case []any: + if len(v) == 0 { + return nil, NewCanNotNormalizeValue(val) + } + first, err := NewNormalValue(v[0]) + if err != nil { + return nil, err + } + if _, ok := first.Bool(); ok { + return convertAnyArrToTypedArr[bool](v, NewNormalBoolArray, NewNormalNillableBoolArray) + } + if _, ok := first.Int(); ok { + return convertAnyArrToIntOrFloatArr(v) + } + if _, ok := first.Float(); ok { + return convertAnyArrToFloatArr(v) + } + if _, ok := first.String(); ok { + return convertAnyArrToTypedArr[string](v, NewNormalStringArray, NewNormalNillableStringArray) + } + if _, ok := first.Bytes(); ok { + return convertAnyArrToTypedArr[[]byte](v, NewNormalBytesArray, NewNormalNillableBytesArray) + } + if _, ok := first.Time(); ok { + return convertAnyArrToTypedArr[time.Time](v, NewNormalTimeArray, NewNormalNillableTimeArray) + } + if _, ok := first.Document(); ok { + return convertAnyArrToTypedArr[*Document](v, NewNormalDocumentArray, NewNormalNillableDocumentArray) + } + } + return nil, NewCanNotNormalizeValue(val) +} + +func convertAnyArrToIntOrFloatArr(arr []any) (NormalValue, error) { + result := make([]int64, len(arr)) + for i := range arr { + if arr[i] == nil { + return convertAnyArrToNillableIntOrFloatArr(arr) + } + switch v := arr[i].(type) { + case int64: + result[i] = v + case float64, float32: + return convertAnyArrToFloatArr(arr) + case int8: + result[i] = int64(v) + case int16: + result[i] = int64(v) + case int32: + result[i] = int64(v) + case int: + result[i] = int64(v) + case uint8: + result[i] = int64(v) + case uint16: + result[i] = int64(v) + case uint32: + result[i] = int64(v) + case uint64: + result[i] = int64(v) + case uint: + result[i] = int64(v) + default: + return nil, NewCanNotNormalizeValue(arr) + } + } + return NewNormalIntArray(result), nil +} + +func convertAnyArrToNillableIntOrFloatArr(arr []any) (NormalValue, error) { + result := make([]immutable.Option[int64], len(arr)) + for i := range arr { + if arr[i] == nil { + result[i] = immutable.None[int64]() + continue + } + var intVal int64 + switch v := arr[i].(type) { + case int64: + intVal = v + case float64, float32: + return convertAnyArrToFloatArr(arr) + case int8: + intVal = int64(v) + case int16: + intVal = int64(v) + case int32: + intVal = int64(v) + case int: + intVal = int64(v) + case uint8: + intVal = int64(v) + case uint16: + intVal = int64(v) + case uint32: + intVal = int64(v) + case uint64: + intVal = int64(v) + case uint: + intVal = int64(v) + default: + return nil, NewCanNotNormalizeValue(arr) + } + result[i] = immutable.Some(intVal) + } + return NewNormalNillableIntArray(result), nil +} + +func convertAnyArrToFloatArr(arr []any) (NormalValue, error) { + result := make([]float64, len(arr)) + for i := range arr { + if arr[i] == nil { + return 
convertAnyArrToNillableFloatArr(arr) + } + + var floatVal float64 + switch v := arr[i].(type) { + case float64: + floatVal = v + case float32: + floatVal = float64(v) + case int8: + floatVal = float64(v) + case int16: + floatVal = float64(v) + case int32: + floatVal = float64(v) + case int64: + floatVal = float64(v) + case int: + floatVal = float64(v) + case uint8: + floatVal = float64(v) + case uint16: + floatVal = float64(v) + case uint32: + floatVal = float64(v) + case uint64: + floatVal = float64(v) + case uint: + floatVal = float64(v) + default: + return nil, NewCanNotNormalizeValue(arr) + } + result[i] = floatVal + } + return NewNormalFloatArray(result), nil +} + +func convertAnyArrToNillableFloatArr(arr []any) (NormalValue, error) { + result := make([]immutable.Option[float64], len(arr)) + for i := range arr { + if arr[i] == nil { + result[i] = immutable.None[float64]() + continue + } + var floatVal float64 + switch v := arr[i].(type) { + case float64: + floatVal = v + case float32: + floatVal = float64(v) + case int8: + floatVal = float64(v) + case int16: + floatVal = float64(v) + case int32: + floatVal = float64(v) + case int64: + floatVal = float64(v) + case int: + floatVal = float64(v) + case uint8: + floatVal = float64(v) + case uint16: + floatVal = float64(v) + case uint32: + floatVal = float64(v) + case uint64: + floatVal = float64(v) + case uint: + floatVal = float64(v) + default: + return nil, NewCanNotNormalizeValue(arr) + } + result[i] = immutable.Some(floatVal) + } + return NewNormalNillableFloatArray(result), nil +} + +func convertAnyArrToTypedArr[T any]( + arr []any, + newNormalArr func([]T) NormalValue, + newNormalNillableArr func([]immutable.Option[T]) NormalValue, +) (NormalValue, error) { + result := make([]T, len(arr)) + for i := range arr { + if arr[i] == nil { + return convertAnyArrToNillableTypedArr[T](arr, newNormalNillableArr) + } + if v, ok := arr[i].(T); ok { + result[i] = v + } else { + return nil, NewCanNotNormalizeValue(arr) + } + } + return newNormalArr(result), nil +} + +func convertAnyArrToNillableTypedArr[T any]( + arr []any, + newNormalNillableArr func([]immutable.Option[T]) NormalValue, +) (NormalValue, error) { + result := make([]immutable.Option[T], len(arr)) + for i := range arr { + if arr[i] == nil { + result[i] = immutable.None[T]() + continue + } + if v, ok := arr[i].(T); ok { + result[i] = immutable.Some(v) + } else { + return nil, NewCanNotNormalizeValue(arr) + } + } + return newNormalNillableArr(result), nil +} diff --git a/client/normal_nil.go b/client/normal_nil.go new file mode 100644 index 0000000000..7cd2df3f16 --- /dev/null +++ b/client/normal_nil.go @@ -0,0 +1,56 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package client + +import ( + "time" + + "github.com/sourcenetwork/immutable" +) + +// NewNormalNil creates a new NormalValue that represents a nil value of a given field kind. 
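+//
+// For example (a sketch):
+//
+//	v, err := NewNormalNil(FieldKind_NILLABLE_INT)
+//	// err == nil, v.IsNil() == true, v.Unwrap() == nil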
+func NewNormalNil(kind FieldKind) (NormalValue, error) { + if kind.IsObject() { + return NewNormalNillableDocument(immutable.None[*Document]()), nil + } + switch kind { + case FieldKind_NILLABLE_BOOL: + return NewNormalNillableBool(immutable.None[bool]()), nil + case FieldKind_NILLABLE_INT: + return NewNormalNillableInt(immutable.None[int64]()), nil + case FieldKind_NILLABLE_FLOAT: + return NewNormalNillableFloat(immutable.None[float64]()), nil + case FieldKind_NILLABLE_DATETIME: + return NewNormalNillableTime(immutable.None[time.Time]()), nil + case FieldKind_NILLABLE_STRING, FieldKind_NILLABLE_JSON: + return NewNormalNillableString(immutable.None[string]()), nil + case FieldKind_NILLABLE_BLOB: + return NewNormalNillableBytes(immutable.None[[]byte]()), nil + case FieldKind_BOOL_ARRAY: + return NewNormalBoolNillableArray(immutable.None[[]bool]()), nil + case FieldKind_INT_ARRAY: + return NewNormalIntNillableArray(immutable.None[[]int64]()), nil + case FieldKind_FLOAT_ARRAY: + return NewNormalFloatNillableArray(immutable.None[[]float64]()), nil + case FieldKind_STRING_ARRAY: + return NewNormalStringNillableArray(immutable.None[[]string]()), nil + case FieldKind_NILLABLE_BOOL_ARRAY: + return NewNormalNillableBoolNillableArray(immutable.None[[]immutable.Option[bool]]()), nil + case FieldKind_NILLABLE_INT_ARRAY: + return NewNormalNillableIntNillableArray(immutable.None[[]immutable.Option[int]]()), nil + case FieldKind_NILLABLE_FLOAT_ARRAY: + return NewNormalNillableFloatNillableArray(immutable.None[[]immutable.Option[float64]]()), nil + case FieldKind_NILLABLE_STRING_ARRAY: + return NewNormalNillableStringNillableArray(immutable.None[[]immutable.Option[string]]()), nil + default: + return nil, NewCanNotMakeNormalNilFromFieldKind(kind) + } +} diff --git a/client/normal_nillable_array.go b/client/normal_nillable_array.go new file mode 100644 index 0000000000..fa6bdc4bbb --- /dev/null +++ b/client/normal_nillable_array.go @@ -0,0 +1,152 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+
+package client
+
+import (
+	"time"
+
+	"github.com/sourcenetwork/immutable"
+	"golang.org/x/exp/constraints"
+)
+
+type baseNillableArrayNormalValue[T any] struct {
+	baseArrayNormalValue[immutable.Option[T]]
+}
+
+func (v baseNillableArrayNormalValue[T]) Unwrap() any {
+	if v.val.HasValue() {
+		return v.val.Value()
+	}
+	return nil
+}
+
+func (v baseNillableArrayNormalValue[T]) IsNil() bool {
+	return !v.val.HasValue()
+}
+
+func (v baseNillableArrayNormalValue[T]) IsNillable() bool {
+	return true
+}
+
+func (v baseNillableArrayNormalValue[T]) IsArray() bool {
+	return true
+}
+
+func newBaseNillableArrayNormalValue[T any](val immutable.Option[T]) baseNillableArrayNormalValue[T] {
+	return baseNillableArrayNormalValue[T]{newBaseArrayNormalValue(val)}
+}
+
+type normalBoolNillableArray struct {
+	baseNillableArrayNormalValue[[]bool]
+}
+
+func (v normalBoolNillableArray) BoolNillableArray() (immutable.Option[[]bool], bool) {
+	return v.val, true
+}
+
+type normalIntNillableArray struct {
+	baseNillableArrayNormalValue[[]int64]
+}
+
+func (v normalIntNillableArray) IntNillableArray() (immutable.Option[[]int64], bool) {
+	return v.val, true
+}
+
+type normalFloatNillableArray struct {
+	baseNillableArrayNormalValue[[]float64]
+}
+
+func (v normalFloatNillableArray) FloatNillableArray() (immutable.Option[[]float64], bool) {
+	return v.val, true
+}
+
+type normalStringNillableArray struct {
+	baseNillableArrayNormalValue[[]string]
+}
+
+func (v normalStringNillableArray) StringNillableArray() (immutable.Option[[]string], bool) {
+	return v.val, true
+}
+
+type normalBytesNillableArray struct {
+	baseNillableArrayNormalValue[[][]byte]
+}
+
+func (v normalBytesNillableArray) BytesNillableArray() (immutable.Option[[][]byte], bool) {
+	return v.val, true
+}
+
+type normalTimeNillableArray struct {
+	baseNillableArrayNormalValue[[]time.Time]
+}
+
+func (v normalTimeNillableArray) TimeNillableArray() (immutable.Option[[]time.Time], bool) {
+	return v.val, true
+}
+
+type normalDocumentNillableArray struct {
+	baseNillableArrayNormalValue[[]*Document]
+}
+
+func (v normalDocumentNillableArray) DocumentNillableArray() (immutable.Option[[]*Document], bool) {
+	return v.val, true
+}
+
+// NewNormalBoolNillableArray creates a new NormalValue that represents an `immutable.Option[[]bool]` value.
+func NewNormalBoolNillableArray(val immutable.Option[[]bool]) NormalValue {
+	return normalBoolNillableArray{newBaseNillableArrayNormalValue(val)}
+}
+
+// NewNormalIntNillableArray creates a new NormalValue that represents an `immutable.Option[[]int64]` value.
+func NewNormalIntNillableArray[T constraints.Integer | constraints.Float](val immutable.Option[[]T]) NormalValue {
+	return normalIntNillableArray{newBaseNillableArrayNormalValue(normalizeNumNillableArr[int64](val))}
+}
+
+// NewNormalFloatNillableArray creates a new NormalValue that represents an `immutable.Option[[]float64]` value.
+func NewNormalFloatNillableArray[T constraints.Integer | constraints.Float](val immutable.Option[[]T]) NormalValue {
+	return normalFloatNillableArray{newBaseNillableArrayNormalValue(normalizeNumNillableArr[float64](val))}
+}
+
+// NewNormalStringNillableArray creates a new NormalValue that represents an `immutable.Option[[]string]` value.
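+//
+// A sketch of typical use ([]byte elements are normalized to string as described):
+//
+//	v := NewNormalStringNillableArray(immutable.Some([][]byte{[]byte("a")}))
+//	s, _ := v.StringNillableArray() // immutable.Some([]string{"a"})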
+func NewNormalStringNillableArray[T string | []byte](val immutable.Option[[]T]) NormalValue {
+	return normalStringNillableArray{newBaseNillableArrayNormalValue(normalizeCharsNillableArr[string](val))}
+}
+
+// NewNormalBytesNillableArray creates a new NormalValue that represents an `immutable.Option[[][]byte]` value.
+func NewNormalBytesNillableArray[T string | []byte](val immutable.Option[[]T]) NormalValue {
+	return normalBytesNillableArray{newBaseNillableArrayNormalValue(normalizeCharsNillableArr[[]byte](val))}
+}
+
+// NewNormalTimeNillableArray creates a new NormalValue that represents an `immutable.Option[[]time.Time]` value.
+func NewNormalTimeNillableArray(val immutable.Option[[]time.Time]) NormalValue {
+	return normalTimeNillableArray{newBaseNillableArrayNormalValue(val)}
+}
+
+// NewNormalDocumentNillableArray creates a new NormalValue that represents an `immutable.Option[[]*Document]` value.
+func NewNormalDocumentNillableArray(val immutable.Option[[]*Document]) NormalValue {
+	return normalDocumentNillableArray{newBaseNillableArrayNormalValue(val)}
+}
+
+func normalizeNumNillableArr[R int64 | float64, T constraints.Integer | constraints.Float](
+	val immutable.Option[[]T],
+) immutable.Option[[]R] {
+	if val.HasValue() {
+		return immutable.Some(normalizeNumArr[R](val.Value()))
+	}
+	return immutable.None[[]R]()
+}
+
+func normalizeCharsNillableArr[R string | []byte, T string | []byte](val immutable.Option[[]T]) immutable.Option[[]R] {
+	if val.HasValue() {
+		return immutable.Some(normalizeCharsArr[R](val.Value()))
+	}
+	return immutable.None[[]R]()
+}
diff --git a/client/normal_nillable_array_of_nillables.go b/client/normal_nillable_array_of_nillables.go
new file mode 100644
index 0000000000..3594186ba2
--- /dev/null
+++ b/client/normal_nillable_array_of_nillables.go
@@ -0,0 +1,160 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+ +package client + +import ( + "time" + + "github.com/sourcenetwork/immutable" + "golang.org/x/exp/constraints" +) + +type normalNillableBoolNillableArray struct { + baseNillableArrayNormalValue[[]immutable.Option[bool]] +} + +func (v normalNillableBoolNillableArray) NillableBoolNillableArray() ( + immutable.Option[[]immutable.Option[bool]], bool, +) { + return v.val, true +} + +type normalNillableIntNillableArray struct { + baseNillableArrayNormalValue[[]immutable.Option[int64]] +} + +func (v normalNillableIntNillableArray) NillableIntNillableArray() ( + immutable.Option[[]immutable.Option[int64]], bool, +) { + return v.val, true +} + +type normalNillableFloatNillableArray struct { + baseNillableArrayNormalValue[[]immutable.Option[float64]] +} + +func (v normalNillableFloatNillableArray) NillableFloatNillableArray() ( + immutable.Option[[]immutable.Option[float64]], bool, +) { + return v.val, true +} + +type normalNillableStringNillableArray struct { + baseNillableArrayNormalValue[[]immutable.Option[string]] +} + +func (v normalNillableStringNillableArray) NillableStringNillableArray() ( + immutable.Option[[]immutable.Option[string]], bool, +) { + return v.val, true +} + +type normalNillableBytesNillableArray struct { + baseNillableArrayNormalValue[[]immutable.Option[[]byte]] +} + +func (v normalNillableBytesNillableArray) NillableBytesNillableArray() ( + immutable.Option[[]immutable.Option[[]byte]], bool, +) { + return v.val, true +} + +type normalNillableTimeNillableArray struct { + baseNillableArrayNormalValue[[]immutable.Option[time.Time]] +} + +func (v normalNillableTimeNillableArray) NillableTimeNillableArray() ( + immutable.Option[[]immutable.Option[time.Time]], bool, +) { + return v.val, true +} + +type normalNillableDocumentNillableArray struct { + baseNillableArrayNormalValue[[]immutable.Option[*Document]] +} + +func (v normalNillableDocumentNillableArray) NillableDocumentNillableArray() ( + immutable.Option[[]immutable.Option[*Document]], bool, +) { + return v.val, true +} + +// NewNormalNillableBoolNillableArray creates a new NormalValue that represents a +// `immutable.Option[[]immutable.Option[bool]]` value. +func NewNormalNillableBoolNillableArray(val immutable.Option[[]immutable.Option[bool]]) NormalValue { + return normalNillableBoolNillableArray{newBaseNillableArrayNormalValue(val)} +} + +// NewNormalNillableIntNillableArray creates a new NormalValue that represents a +// `immutable.Option[[]immutable.Option[int64]]` value. +func NewNormalNillableIntNillableArray[T constraints.Integer | constraints.Float]( + val immutable.Option[[]immutable.Option[T]], +) NormalValue { + return normalNillableIntNillableArray{ + newBaseNillableArrayNormalValue(normalizeNillableNumNillableArr[int64](val)), + } +} + +// NewNormalNillableFloatNillableArray creates a new NormalValue that represents a +// `immutable.Option[[]immutable.Option[float64]]` value. +func NewNormalNillableFloatNillableArray[T constraints.Integer | constraints.Float]( + val immutable.Option[[]immutable.Option[T]], +) NormalValue { + return normalNillableFloatNillableArray{ + newBaseNillableArrayNormalValue(normalizeNillableNumNillableArr[float64](val)), + } +} + +// NewNormalNillableStringNillableArray creates a new NormalValue that represents a +// `immutable.Option[[]immutable.Option[string]]` value. 
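+//
+// A sketch of the doubly nillable shape:
+//
+//	v := NewNormalNillableStringNillableArray(immutable.Some(
+//		[]immutable.Option[string]{immutable.Some("a"), immutable.None[string]()},
+//	))
+//	arr, _ := v.NillableStringNillableArray()
+//	// arr.Value()[1].HasValue() == false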
+func NewNormalNillableStringNillableArray[T string | []byte](val immutable.Option[[]immutable.Option[T]]) NormalValue { + return normalNillableStringNillableArray{ + newBaseNillableArrayNormalValue(normalizeNillableCharsNillableArr[string](val)), + } +} + +// NewNormalNillableBytesNillableArray creates a new NormalValue that represents a +// `immutable.Option[[]immutable.Option[[]byte]]` value. +func NewNormalNillableBytesNillableArray[T string | []byte](val immutable.Option[[]immutable.Option[T]]) NormalValue { + return normalNillableBytesNillableArray{ + newBaseNillableArrayNormalValue(normalizeNillableCharsNillableArr[[]byte](val)), + } +} + +// NewNormalNillableTimeNillableArray creates a new NormalValue that represents a +// `immutable.Option[[]immutable.Option[time.Time]]` value. +func NewNormalNillableTimeNillableArray(val immutable.Option[[]immutable.Option[time.Time]]) NormalValue { + return normalNillableTimeNillableArray{newBaseNillableArrayNormalValue(val)} +} + +// NewNormalNillableDocumentNillableArray creates a new NormalValue that represents a +// `immutable.Option[[]immutable.Option[*Document]]` value. +func NewNormalNillableDocumentNillableArray(val immutable.Option[[]immutable.Option[*Document]]) NormalValue { + return normalNillableDocumentNillableArray{newBaseNillableArrayNormalValue(val)} +} + +func normalizeNillableNumNillableArr[R int64 | float64, T constraints.Integer | constraints.Float]( + val immutable.Option[[]immutable.Option[T]], +) immutable.Option[[]immutable.Option[R]] { + if val.HasValue() { + return immutable.Some(normalizeNillableNumArr[R](val.Value())) + } + return immutable.None[[]immutable.Option[R]]() +} + +func normalizeNillableCharsNillableArr[R string | []byte, T string | []byte]( + val immutable.Option[[]immutable.Option[T]], +) immutable.Option[[]immutable.Option[R]] { + if val.HasValue() { + return immutable.Some(normalizeNillableCharsArr[R](val.Value())) + } + return immutable.None[[]immutable.Option[R]]() +} diff --git a/client/normal_nillable_scalar.go b/client/normal_nillable_scalar.go new file mode 100644 index 0000000000..88876c9d7e --- /dev/null +++ b/client/normal_nillable_scalar.go @@ -0,0 +1,148 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+
+package client
+
+import (
+	"time"
+
+	"github.com/sourcenetwork/immutable"
+	"golang.org/x/exp/constraints"
+)
+
+type baseNillableNormalValue[T any] struct {
+	baseNormalValue[immutable.Option[T]]
+}
+
+func (v baseNillableNormalValue[T]) Unwrap() any {
+	if v.val.HasValue() {
+		return v.val.Value()
+	}
+	return nil
+}
+
+func (v baseNillableNormalValue[T]) IsNil() bool {
+	return !v.val.HasValue()
+}
+
+func (v baseNillableNormalValue[T]) IsNillable() bool {
+	return true
+}
+
+func newBaseNillableNormalValue[T any](val immutable.Option[T]) baseNillableNormalValue[T] {
+	return baseNillableNormalValue[T]{newBaseNormalValue(val)}
+}
+
+type normalNillableBool struct {
+	baseNillableNormalValue[bool]
+}
+
+func (v normalNillableBool) NillableBool() (immutable.Option[bool], bool) {
+	return v.val, true
+}
+
+type normalNillableInt struct {
+	baseNillableNormalValue[int64]
+}
+
+func (v normalNillableInt) NillableInt() (immutable.Option[int64], bool) {
+	return v.val, true
+}
+
+type normalNillableFloat struct {
+	baseNillableNormalValue[float64]
+}
+
+func (v normalNillableFloat) NillableFloat() (immutable.Option[float64], bool) {
+	return v.val, true
+}
+
+type normalNillableString struct {
+	baseNillableNormalValue[string]
+}
+
+func (v normalNillableString) NillableString() (immutable.Option[string], bool) {
+	return v.val, true
+}
+
+type normalNillableBytes struct {
+	baseNillableNormalValue[[]byte]
+}
+
+func (v normalNillableBytes) NillableBytes() (immutable.Option[[]byte], bool) {
+	return v.val, true
+}
+
+type normalNillableTime struct {
+	baseNillableNormalValue[time.Time]
+}
+
+func (v normalNillableTime) NillableTime() (immutable.Option[time.Time], bool) {
+	return v.val, true
+}
+
+type normalNillableDocument struct {
+	baseNillableNormalValue[*Document]
+}
+
+func (v normalNillableDocument) NillableDocument() (immutable.Option[*Document], bool) {
+	return v.val, true
+}
+
+// NewNormalNillableBool creates a new NormalValue that represents an `immutable.Option[bool]` value.
+func NewNormalNillableBool(val immutable.Option[bool]) NormalValue {
+	return normalNillableBool{newBaseNillableNormalValue(val)}
+}
+
+// NewNormalNillableInt creates a new NormalValue that represents an `immutable.Option[int64]` value.
+func NewNormalNillableInt[T constraints.Integer | constraints.Float](val immutable.Option[T]) NormalValue {
+	return normalNillableInt{newBaseNillableNormalValue(normalizeNillableNum[int64](val))}
+}
+
+// NewNormalNillableFloat creates a new NormalValue that represents an `immutable.Option[float64]` value.
+func NewNormalNillableFloat[T constraints.Integer | constraints.Float](val immutable.Option[T]) NormalValue {
+	return normalNillableFloat{newBaseNillableNormalValue(normalizeNillableNum[float64](val))}
+}
+
+// NewNormalNillableString creates a new NormalValue that represents an `immutable.Option[string]` value.
+func NewNormalNillableString[T string | []byte](val immutable.Option[T]) NormalValue {
+	return normalNillableString{newBaseNillableNormalValue(normalizeNillableChars[string](val))}
+}
+
+// NewNormalNillableBytes creates a new NormalValue that represents an `immutable.Option[[]byte]` value.
+func NewNormalNillableBytes[T string | []byte](val immutable.Option[T]) NormalValue {
+	return normalNillableBytes{newBaseNillableNormalValue(normalizeNillableChars[[]byte](val))}
+}
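As a quick illustration of the nillable scalars above, a sketch of constructing both a present and an absent value; the `main` wrapper and import path are assumptions for the example:

package main

import (
	"fmt"

	"github.com/sourcenetwork/defradb/client"
	"github.com/sourcenetwork/immutable"
)

func main() {
	// int32 is normalized to the canonical int64 representation.
	some := client.NewNormalNillableInt(immutable.Some(int32(7)))
	none := client.NewNormalNillableInt(immutable.None[int32]())

	fmt.Println(some.IsNil(), some.Unwrap()) // false 7
	fmt.Println(none.IsNil(), none.Unwrap()) // true <nil>
}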
+
+// NewNormalNillableTime creates a new NormalValue that represents an `immutable.Option[time.Time]` value.
+func NewNormalNillableTime(val immutable.Option[time.Time]) NormalValue {
+	return normalNillableTime{newBaseNillableNormalValue(val)}
+}
+
+// NewNormalNillableDocument creates a new NormalValue that represents an `immutable.Option[*Document]` value.
+func NewNormalNillableDocument(val immutable.Option[*Document]) NormalValue {
+	return normalNillableDocument{newBaseNillableNormalValue(val)}
+}
+
+func normalizeNillableNum[R int64 | float64, T constraints.Integer | constraints.Float](
+	val immutable.Option[T],
+) immutable.Option[R] {
+	if val.HasValue() {
+		return immutable.Some(R(val.Value()))
+	}
+	return immutable.None[R]()
+}
+
+func normalizeNillableChars[R string | []byte, T string | []byte](val immutable.Option[T]) immutable.Option[R] {
+	if val.HasValue() {
+		return immutable.Some(R(val.Value()))
+	}
+	return immutable.None[R]()
+}
diff --git a/client/normal_scalar.go b/client/normal_scalar.go
new file mode 100644
index 0000000000..f4378f5474
--- /dev/null
+++ b/client/normal_scalar.go
@@ -0,0 +1,130 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package client
+
+import (
+	"time"
+
+	"golang.org/x/exp/constraints"
+)
+
+// baseNormalValue is a dummy implementation of NormalValue to be embedded in other types.
+type baseNormalValue[T any] struct {
+	NormalVoid
+	val T
+}
+
+func (v baseNormalValue[T]) Unwrap() any {
+	return v.val
+}
+
+func newBaseNormalValue[T any](val T) baseNormalValue[T] {
+	return baseNormalValue[T]{val: val}
+}
+
+type normalBool struct {
+	baseNormalValue[bool]
+}
+
+func (v normalBool) Bool() (bool, bool) {
+	return v.val, true
+}
+
+type normalInt struct {
+	baseNormalValue[int64]
+}
+
+func (v normalInt) Int() (int64, bool) {
+	return v.val, true
+}
+
+type normalFloat struct {
+	baseNormalValue[float64]
+}
+
+func (v normalFloat) Float() (float64, bool) {
+	return v.val, true
+}
+
+type normalString struct {
+	baseNormalValue[string]
+}
+
+func (v normalString) String() (string, bool) {
+	return v.val, true
+}
+
+type normalBytes struct {
+	baseNormalValue[[]byte]
+}
+
+func (v normalBytes) Bytes() ([]byte, bool) {
+	return v.val, true
+}
+
+type normalTime struct {
+	baseNormalValue[time.Time]
+}
+
+func (v normalTime) Time() (time.Time, bool) {
+	return v.val, true
+}
+
+type normalDocument struct {
+	baseNormalValue[*Document]
+}
+
+func (v normalDocument) Document() (*Document, bool) {
+	return v.val, true
+}
+
+func newNormalInt(val int64) NormalValue {
+	return normalInt{newBaseNormalValue(val)}
+}
+
+func newNormalFloat(val float64) NormalValue {
+	return normalFloat{newBaseNormalValue(val)}
+}
+
+// NewNormalBool creates a new NormalValue that represents a `bool` value.
+func NewNormalBool(val bool) NormalValue {
+	return normalBool{baseNormalValue[bool]{val: val}}
+}
+
+// NewNormalInt creates a new NormalValue that represents an `int64` value.
+func NewNormalInt[T constraints.Integer | constraints.Float](val T) NormalValue {
+	return normalInt{baseNormalValue[int64]{val: int64(val)}}
+}
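The generic constructors normalize any Go numeric type to the canonical int64 or float64 representation. A small sketch of that behaviour, mirroring the conversions exercised by the tests further down; the `main` wrapper and import path are assumptions for the example:

package main

import (
	"fmt"

	"github.com/sourcenetwork/defradb/client"
)

func main() {
	a := client.NewNormalInt(int8(2))
	b := client.NewNormalInt(uint64(2))
	c := client.NewNormalInt(float32(2.5)) // the fractional part is truncated

	for _, v := range []client.NormalValue{a, b, c} {
		i, ok := v.Int()
		fmt.Println(i, ok) // 2 true, each time
	}
}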
+
+// NewNormalFloat creates a new NormalValue that represents a `float64` value.
+func NewNormalFloat[T constraints.Integer | constraints.Float](val T) NormalValue {
+	return normalFloat{baseNormalValue[float64]{val: float64(val)}}
+}
+
+// NewNormalString creates a new NormalValue that represents a `string` value.
+func NewNormalString[T string | []byte](val T) NormalValue {
+	return normalString{baseNormalValue[string]{val: string(val)}}
+}
+
+// NewNormalBytes creates a new NormalValue that represents a `[]byte` value.
+func NewNormalBytes[T string | []byte](val T) NormalValue {
+	return normalBytes{baseNormalValue[[]byte]{val: []byte(val)}}
+}
+
+// NewNormalTime creates a new NormalValue that represents a `time.Time` value.
+func NewNormalTime(val time.Time) NormalValue {
+	return normalTime{baseNormalValue[time.Time]{val: val}}
+}
+
+// NewNormalDocument creates a new NormalValue that represents a `*Document` value.
+func NewNormalDocument(val *Document) NormalValue {
+	return normalDocument{baseNormalValue[*Document]{val: val}}
+}
diff --git a/client/normal_util.go b/client/normal_util.go
new file mode 100644
index 0000000000..87310d9631
--- /dev/null
+++ b/client/normal_util.go
@@ -0,0 +1,118 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package client
+
+// ToArrayOfNormalValues converts a NormalValue into a slice of NormalValue if the given value
+// is an array. If the given value is not an array, an error is returned.
+func ToArrayOfNormalValues(val NormalValue) ([]NormalValue, error) {
+	if !val.IsArray() {
+		return nil, NewCanNotTurnNormalValueIntoArray(val)
+	}
+	if !val.IsNillable() {
+		if v, ok := val.BoolArray(); ok {
+			return toNormalArray(v, NewNormalBool), nil
+		}
+		if v, ok := val.IntArray(); ok {
+			return toNormalArray(v, NewNormalInt), nil
+		}
+		if v, ok := val.FloatArray(); ok {
+			return toNormalArray(v, NewNormalFloat), nil
+		}
+		if v, ok := val.StringArray(); ok {
+			return toNormalArray(v, NewNormalString), nil
+		}
+		if v, ok := val.BytesArray(); ok {
+			return toNormalArray(v, NewNormalBytes), nil
+		}
+		if v, ok := val.TimeArray(); ok {
+			return toNormalArray(v, NewNormalTime), nil
+		}
+		if v, ok := val.DocumentArray(); ok {
+			return toNormalArray(v, NewNormalDocument), nil
+		}
+		if v, ok := val.NillableBoolArray(); ok {
+			return toNormalArray(v, NewNormalNillableBool), nil
+		}
+		if v, ok := val.NillableIntArray(); ok {
+			return toNormalArray(v, NewNormalNillableInt), nil
+		}
+		if v, ok := val.NillableFloatArray(); ok {
+			return toNormalArray(v, NewNormalNillableFloat), nil
+		}
+		if v, ok := val.NillableStringArray(); ok {
+			return toNormalArray(v, NewNormalNillableString), nil
+		}
+		if v, ok := val.NillableBytesArray(); ok {
+			return toNormalArray(v, NewNormalNillableBytes), nil
+		}
+		if v, ok := val.NillableTimeArray(); ok {
+			return toNormalArray(v, NewNormalNillableTime), nil
+		}
+		if v, ok := val.NillableDocumentArray(); ok {
+			return toNormalArray(v, NewNormalNillableDocument), nil
+		}
+	} else {
+		if val.IsNil() {
+			return nil, nil
+		}
+		if v, ok := val.NillableBoolNillableArray(); ok {
+			return toNormalArray(v.Value(), NewNormalNillableBool), nil
+		}
+		if v, ok := val.NillableIntNillableArray(); ok {
+			return toNormalArray(v.Value(), NewNormalNillableInt), nil
+		}
+		if v, ok := val.NillableFloatNillableArray(); ok {
+			return toNormalArray(v.Value(), NewNormalNillableFloat), nil
+		}
+		if v, ok := val.NillableStringNillableArray(); ok {
+			return toNormalArray(v.Value(), NewNormalNillableString), nil
+		}
+		if v, ok := val.NillableBytesNillableArray(); ok {
+			return toNormalArray(v.Value(), NewNormalNillableBytes), nil
+		}
+		if v, ok := val.NillableTimeNillableArray(); ok {
+			return toNormalArray(v.Value(), NewNormalNillableTime), nil
+		}
+		if v, ok := val.NillableDocumentNillableArray(); ok {
+			return toNormalArray(v.Value(), NewNormalNillableDocument), nil
+		}
+		if v, ok := val.BoolNillableArray(); ok {
+			return toNormalArray(v.Value(), NewNormalBool), nil
+		}
+		if v, ok := val.IntNillableArray(); ok {
+			return toNormalArray(v.Value(), NewNormalInt), nil
+		}
+		if v, ok := val.FloatNillableArray(); ok {
+			return toNormalArray(v.Value(), NewNormalFloat), nil
+		}
+		if v, ok := val.StringNillableArray(); ok {
+			return toNormalArray(v.Value(), NewNormalString), nil
+		}
+		if v, ok := val.BytesNillableArray(); ok {
+			return toNormalArray(v.Value(), NewNormalBytes), nil
+		}
+		if v, ok := val.TimeNillableArray(); ok {
+			return toNormalArray(v.Value(), NewNormalTime), nil
+		}
+		if v, ok := val.DocumentNillableArray(); ok {
+			return toNormalArray(v.Value(), NewNormalDocument), nil
+		}
+	}
+	return nil, NewCanNotTurnNormalValueIntoArray(val)
+}
+
+func toNormalArray[T any](val []T, f func(T) NormalValue) []NormalValue {
+	res := make([]NormalValue, len(val))
+	for i := range val {
+		res[i] = f(val[i])
+	}
+	return res
+}
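A usage sketch for the helper above; the import path is an assumption, and `NewNormalIntArray` is the array constructor defined elsewhere in this package:

package main

import (
	"fmt"

	"github.com/sourcenetwork/defradb/client"
)

func main() {
	arr := client.NewNormalIntArray([]int{1, 2, 3})

	elems, err := client.ToArrayOfNormalValues(arr)
	if err != nil {
		panic(err)
	}
	for _, e := range elems {
		i, _ := e.Int() // each element is a scalar NormalValue
		fmt.Println(i)
	}

	// A non-array value is rejected with an error.
	_, err = client.ToArrayOfNormalValues(client.NewNormalInt(1))
	fmt.Println(err != nil) // true
}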
diff --git a/client/normal_value.go b/client/normal_value.go
new file mode 100644
index 0000000000..3f0681fbfc
--- /dev/null
+++ b/client/normal_value.go
@@ -0,0 +1,207 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package client
+
+import (
+	"time"
+
+	"github.com/sourcenetwork/immutable"
+)
+
+// NormalValue is the interface for the normal value types.
+// It is used to represent the normal (or standard) values across the system and to avoid
+// asserting all possible types like int, int32, int64, etc.
+//
+// All methods returning a specific type return the value and a second boolean flag indicating
+// whether the value is of the requested type. They act similarly to Go's type assertions.
+//
+// All nillable values are represented as [immutable.Option[T]].
+type NormalValue interface {
+	// Unwrap returns the underlying value.
+	// For non-nillable values it will return the value as is.
+	// For nillable values (of type [immutable.Option[T]]) it will return the value itself
+	// if the option has a value, otherwise it will return nil.
+	Unwrap() any
+
+	// IsNil returns whether the value is nil. For non-nillable values it will always return false.
+	IsNil() bool
+	// IsNillable returns whether the value can be nil.
+	IsNillable() bool
+	// IsArray returns whether the value is an array.
+	IsArray() bool
+
+	// Bool returns the value as a bool. The second return flag is true if the value is a bool.
+	// Otherwise it will return false and false.
+	Bool() (bool, bool)
+	// Int returns the value as an int64. The second return flag is true if the value is an int64.
+	// Otherwise it will return 0 and false.
+	Int() (int64, bool)
+	// Float returns the value as a float64. The second return flag is true if the value is a float64.
+	// Otherwise it will return 0 and false.
+	Float() (float64, bool)
+	// String returns the value as a string. The second return flag is true if the value is a string.
+	// Otherwise it will return "" and false.
+	String() (string, bool)
+	// Bytes returns the value as a []byte. The second return flag is true if the value is a []byte.
+	// Otherwise it will return nil and false.
+	Bytes() ([]byte, bool)
+	// Time returns the value as a [time.Time]. The second return flag is true if the value is a [time.Time].
+	// Otherwise it will return the zero [time.Time] and false.
+	Time() (time.Time, bool)
+	// Document returns the value as a [*Document]. The second return flag is true if the value is a [*Document].
+	// Otherwise it will return nil and false.
+	Document() (*Document, bool)
+
+	// NillableBool returns the value as a nillable bool.
+	// The second return flag is true if the value is [immutable.Option[bool]].
+	// Otherwise it will return [immutable.None[bool]()] and false.
+	NillableBool() (immutable.Option[bool], bool)
+	// NillableInt returns the value as a nillable int64.
+	// The second return flag is true if the value is [immutable.Option[int64]].
+	// Otherwise it will return [immutable.None[int64]()] and false.
+	NillableInt() (immutable.Option[int64], bool)
+	// NillableFloat returns the value as a nillable float64.
+	// The second return flag is true if the value is [immutable.Option[float64]].
+	// Otherwise it will return [immutable.None[float64]()] and false.
+	NillableFloat() (immutable.Option[float64], bool)
+	// NillableString returns the value as a nillable string.
+	// The second return flag is true if the value is [immutable.Option[string]].
+	// Otherwise it will return [immutable.None[string]()] and false.
+	NillableString() (immutable.Option[string], bool)
+	// NillableBytes returns the value as a nillable byte slice.
+	// The second return flag is true if the value is [immutable.Option[[]byte]].
+	// Otherwise it will return [immutable.None[[]byte]()] and false.
+	NillableBytes() (immutable.Option[[]byte], bool)
+	// NillableTime returns the value as a nillable time.Time.
+	// The second return flag is true if the value is [immutable.Option[time.Time]].
+	// Otherwise it will return [immutable.None[time.Time]()] and false.
+	NillableTime() (immutable.Option[time.Time], bool)
+	// NillableDocument returns the value as a nillable *Document.
+	// The second return flag is true if the value is [immutable.Option[*Document]].
+	// Otherwise it will return [immutable.None[*Document]()] and false.
+	NillableDocument() (immutable.Option[*Document], bool)
+
+	// BoolArray returns the value as a bool array.
+	// The second return flag is true if the value is a []bool.
+	// Otherwise it will return nil and false.
+	BoolArray() ([]bool, bool)
+	// IntArray returns the value as an int64 array.
+	// The second return flag is true if the value is a []int64.
+	// Otherwise it will return nil and false.
+	IntArray() ([]int64, bool)
+	// FloatArray returns the value as a float64 array.
+	// The second return flag is true if the value is a []float64.
+	// Otherwise it will return nil and false.
+	FloatArray() ([]float64, bool)
+	// StringArray returns the value as a string array.
+	// The second return flag is true if the value is a []string.
+	// Otherwise it will return nil and false.
+	StringArray() ([]string, bool)
+	// BytesArray returns the value as a byte slice array.
+	// The second return flag is true if the value is a [][]byte.
+	// Otherwise it will return nil and false.
+	BytesArray() ([][]byte, bool)
+	// TimeArray returns the value as a time.Time array.
+	// The second return flag is true if the value is a []time.Time.
+	// Otherwise it will return nil and false.
+	TimeArray() ([]time.Time, bool)
+	// DocumentArray returns the value as a [*Document] array.
+	// The second return flag is true if the value is a []*Document.
+	// Otherwise it will return nil and false.
+	DocumentArray() ([]*Document, bool)
+
+	// BoolNillableArray returns the value as a nillable array of bool elements.
+	// The second return flag is true if the value is [immutable.Option[[]bool]].
+	// Otherwise it will return [immutable.None[[]bool]()] and false.
+	BoolNillableArray() (immutable.Option[[]bool], bool)
+	// IntNillableArray returns the value as a nillable array of int64 elements.
+	// The second return flag is true if the value is [immutable.Option[[]int64]].
+	// Otherwise it will return [immutable.None[[]int64]()] and false.
+	IntNillableArray() (immutable.Option[[]int64], bool)
+	// FloatNillableArray returns the value as a nillable array of float64 elements.
+	// The second return flag is true if the value is [immutable.Option[[]float64]].
+	// Otherwise it will return [immutable.None[[]float64]()] and false.
+	FloatNillableArray() (immutable.Option[[]float64], bool)
+	// StringNillableArray returns the value as a nillable array of string elements.
+	// The second return flag is true if the value is [immutable.Option[[]string]].
+	// Otherwise it will return [immutable.None[[]string]()] and false.
+	StringNillableArray() (immutable.Option[[]string], bool)
+	// BytesNillableArray returns the value as a nillable array of byte slice elements.
+	// The second return flag is true if the value is [immutable.Option[[][]byte]].
+	// Otherwise it will return [immutable.None[[][]byte]()] and false.
+	BytesNillableArray() (immutable.Option[[][]byte], bool)
+	// TimeNillableArray returns the value as a nillable array of [time.Time] elements.
+	// The second return flag is true if the value is [immutable.Option[[]time.Time]].
+	// Otherwise it will return [immutable.None[[]time.Time]()] and false.
+	TimeNillableArray() (immutable.Option[[]time.Time], bool)
+	// DocumentNillableArray returns the value as a nillable array of [*Document] elements.
+	// The second return flag is true if the value is [immutable.Option[[]*Document]].
+	// Otherwise it will return [immutable.None[[]*Document]()] and false.
+	DocumentNillableArray() (immutable.Option[[]*Document], bool)
+
+	// NillableBoolArray returns the value as an array of nillable bool elements.
+	// The second return flag is true if the value is []immutable.Option[bool].
+	// Otherwise it will return nil and false.
+	NillableBoolArray() ([]immutable.Option[bool], bool)
+	// NillableIntArray returns the value as an array of nillable int64 elements.
+	// The second return flag is true if the value is []immutable.Option[int64].
+	// Otherwise it will return nil and false.
+	NillableIntArray() ([]immutable.Option[int64], bool)
+	// NillableFloatArray returns the value as an array of nillable float64 elements.
+	// The second return flag is true if the value is []immutable.Option[float64].
+	// Otherwise it will return nil and false.
+	NillableFloatArray() ([]immutable.Option[float64], bool)
+	// NillableStringArray returns the value as an array of nillable string elements.
+	// The second return flag is true if the value is []immutable.Option[string].
+	// Otherwise it will return nil and false.
+	NillableStringArray() ([]immutable.Option[string], bool)
+	// NillableBytesArray returns the value as an array of nillable byte slice elements.
+	// The second return flag is true if the value is []immutable.Option[[]byte].
+	// Otherwise it will return nil and false.
+	NillableBytesArray() ([]immutable.Option[[]byte], bool)
+	// NillableTimeArray returns the value as an array of nillable time.Time elements.
+	// The second return flag is true if the value is []immutable.Option[time.Time].
+	// Otherwise it will return nil and false.
+	NillableTimeArray() ([]immutable.Option[time.Time], bool)
+	// NillableDocumentArray returns the value as an array of nillable *Document elements.
+	// The second return flag is true if the value is []immutable.Option[*Document].
+	// Otherwise it will return nil and false.
+	NillableDocumentArray() ([]immutable.Option[*Document], bool)
+
+	// NillableBoolNillableArray returns the value as a nillable array of nillable bool elements.
+	// The second return flag is true if the value is [immutable.Option[[]immutable.Option[bool]]].
+	// Otherwise it will return [immutable.None[[]immutable.Option[bool]]()] and false.
+	NillableBoolNillableArray() (immutable.Option[[]immutable.Option[bool]], bool)
+	// NillableIntNillableArray returns the value as a nillable array of nillable int64 elements.
+	// The second return flag is true if the value is [immutable.Option[[]immutable.Option[int64]]].
+	// Otherwise it will return [immutable.None[[]immutable.Option[int64]]()] and false.
+	NillableIntNillableArray() (immutable.Option[[]immutable.Option[int64]], bool)
+	// NillableFloatNillableArray returns the value as a nillable array of nillable float64 elements.
+	// The second return flag is true if the value is [immutable.Option[[]immutable.Option[float64]]].
+	// Otherwise it will return [immutable.None[[]immutable.Option[float64]]()] and false.
+	NillableFloatNillableArray() (immutable.Option[[]immutable.Option[float64]], bool)
+	// NillableStringNillableArray returns the value as a nillable array of nillable string elements.
+	// The second return flag is true if the value is [immutable.Option[[]immutable.Option[string]]].
+	// Otherwise it will return [immutable.None[[]immutable.Option[string]]()] and false.
+	NillableStringNillableArray() (immutable.Option[[]immutable.Option[string]], bool)
+	// NillableBytesNillableArray returns the value as a nillable array of nillable byte slice elements.
+	// The second return flag is true if the value is [immutable.Option[[]immutable.Option[[]byte]]].
+	// Otherwise it will return [immutable.None[[]immutable.Option[[]byte]]()] and false.
+	NillableBytesNillableArray() (immutable.Option[[]immutable.Option[[]byte]], bool)
+	// NillableTimeNillableArray returns the value as a nillable array of nillable time.Time elements.
+	// The second return flag is true if the value is [immutable.Option[[]immutable.Option[time.Time]]].
+	// Otherwise it will return [immutable.None[[]immutable.Option[time.Time]]()] and false.
+	NillableTimeNillableArray() (immutable.Option[[]immutable.Option[time.Time]], bool)
+	// NillableDocumentNillableArray returns the value as a nillable array of nillable *Document elements.
+	// The second return flag is true if the value is [immutable.Option[[]immutable.Option[*Document]]].
+	// Otherwise it will return [immutable.None[[]immutable.Option[*Document]]()] and false.
+	NillableDocumentNillableArray() (immutable.Option[[]immutable.Option[*Document]], bool)
+}
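To make the access pattern concrete, a sketch of consuming a NormalValue without a type switch; the `describe` helper, the `main` wrapper, and the import path are illustrative assumptions, not part of the patch:

package main

import (
	"fmt"

	"github.com/sourcenetwork/defradb/client"
)

// describe inspects a NormalValue via the accessor pairs: only the
// accessor matching the underlying type reports ok=true.
func describe(v client.NormalValue) string {
	if v.IsNil() {
		return "nil"
	}
	if s, ok := v.String(); ok {
		return "string: " + s
	}
	if i, ok := v.Int(); ok {
		return fmt.Sprintf("int: %d", i)
	}
	if arr, ok := v.IntArray(); ok {
		return fmt.Sprintf("int array of %d", len(arr))
	}
	return fmt.Sprintf("other: %v", v.Unwrap())
}

func main() {
	fmt.Println(describe(client.NewNormalString("hi")))       // string: hi
	fmt.Println(describe(client.NewNormalInt(42)))            // int: 42
	fmt.Println(describe(client.NewNormalIntArray([]int{1}))) // int array of 1
}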
diff --git a/client/normal_value_test.go b/client/normal_value_test.go
new file mode 100644
index 0000000000..33cd20c46e
--- /dev/null
+++ b/client/normal_value_test.go
@@ -0,0 +1,1649 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package client
+
+import (
+	"reflect"
+	"testing"
+	"time"
+
+	"github.com/sourcenetwork/immutable"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+type nType string
+
+const (
+	BoolType     nType = "Bool"
+	IntType      nType = "Int"
+	FloatType    nType = "Float"
+	StringType   nType = "String"
+	BytesType    nType = "Bytes"
+	TimeType     nType = "Time"
+	DocumentType nType = "Document"
+
+	NillableBoolType     nType = "NillableBool"
+	NillableIntType      nType = "NillableInt"
+	NillableFloatType    nType = "NillableFloat"
+	NillableStringType   nType = "NillableString"
+	NillableBytesType    nType = "NillableBytes"
+	NillableTimeType     nType = "NillableTime"
+	NillableDocumentType nType = "NillableDocument"
+
+	BoolArray     nType = "BoolArray"
+	IntArray      nType = "IntArray"
+	FloatArray    nType = "FloatArray"
+	StringArray   nType = "StringArray"
+	BytesArray    nType = "BytesArray"
+	TimeArray     nType = "TimeArray"
+	DocumentArray nType = "DocumentArray"
+
+	NillableBoolArray     nType = "NillableBoolArray"
+	NillableIntArray      nType = "NillableIntArray"
+	NillableFloatArray    nType = "NillableFloatArray"
+	NillableStringArray   nType = "NillableStringArray"
+	NillableBytesArray    nType = "NillableBytesArray"
+	NillableTimeArray     nType = "NillableTimeArray"
+	NillableDocumentArray nType = "NillableDocumentArray"
+
+	BoolNillableArray     nType = "BoolNillableArray"
+	IntNillableArray      nType = "IntNillableArray"
+	FloatNillableArray    nType = "FloatNillableArray"
+	StringNillableArray   nType = "StringNillableArray"
+	BytesNillableArray    nType = "BytesNillableArray"
+	TimeNillableArray     nType = "TimeNillableArray"
+	DocumentNillableArray nType = "DocumentNillableArray"
+
+	NillableBoolNillableArray     nType = "NillableBoolNillableArray"
+	NillableIntNillableArray      nType = "NillableIntNillableArray"
+	NillableFloatNillableArray    nType = "NillableFloatNillableArray"
+	NillableStringNillableArray   nType = "NillableStringNillableArray"
+	NillableBytesNillableArray    nType = "NillableBytesNillableArray"
+	NillableTimeNillableArray     nType = "NillableTimeNillableArray"
+	NillableDocumentNillableArray nType = "NillableDocumentNillableArray"
+)
+
+// extractValue takes an input of type `any` and checks if it is an `Option[T]`.
+// If it is and contains a value, it returns the contained value.
+// Otherwise, it returns the input itself.
+func extractValue(input any) any {
+	inputVal := reflect.ValueOf(input)
+
+	// Check if the type is Option[T] by seeing if it has the HasValue and Value methods.
+	hasValueMethod := inputVal.MethodByName("HasValue")
+	valueMethod := inputVal.MethodByName("Value")
+
+	if hasValueMethod.IsValid() && valueMethod.IsValid() {
+		// Call HasValue to check if there's a value.
+		hasValueResult := hasValueMethod.Call(nil)
+		if len(hasValueResult) == 1 {
+			if hasValueResult[0].Bool() {
+				// Call Value to get the actual value if HasValue is true.
+ valueResult := valueMethod.Call(nil) + if len(valueResult) == 1 { + return valueResult[0].Interface() + } + } else { + // Return nil if HasValue is false. + return nil + } + } + } + + // Return the input itself if it's not an Option[T] with a value. + return input +} + +func TestNormalValue_NewValueAndTypeAssertion(t *testing.T) { + typeAssertMap := map[nType]func(NormalValue) (any, bool){ + BoolType: func(v NormalValue) (any, bool) { return v.Bool() }, + IntType: func(v NormalValue) (any, bool) { return v.Int() }, + FloatType: func(v NormalValue) (any, bool) { return v.Float() }, + StringType: func(v NormalValue) (any, bool) { return v.String() }, + BytesType: func(v NormalValue) (any, bool) { return v.Bytes() }, + TimeType: func(v NormalValue) (any, bool) { return v.Time() }, + DocumentType: func(v NormalValue) (any, bool) { return v.Document() }, + + NillableBoolType: func(v NormalValue) (any, bool) { return v.NillableBool() }, + NillableIntType: func(v NormalValue) (any, bool) { return v.NillableInt() }, + NillableFloatType: func(v NormalValue) (any, bool) { return v.NillableFloat() }, + NillableStringType: func(v NormalValue) (any, bool) { return v.NillableString() }, + NillableBytesType: func(v NormalValue) (any, bool) { return v.NillableBytes() }, + NillableTimeType: func(v NormalValue) (any, bool) { return v.NillableTime() }, + NillableDocumentType: func(v NormalValue) (any, bool) { return v.NillableDocument() }, + + BoolArray: func(v NormalValue) (any, bool) { return v.BoolArray() }, + IntArray: func(v NormalValue) (any, bool) { return v.IntArray() }, + FloatArray: func(v NormalValue) (any, bool) { return v.FloatArray() }, + StringArray: func(v NormalValue) (any, bool) { return v.StringArray() }, + BytesArray: func(v NormalValue) (any, bool) { return v.BytesArray() }, + TimeArray: func(v NormalValue) (any, bool) { return v.TimeArray() }, + DocumentArray: func(v NormalValue) (any, bool) { return v.DocumentArray() }, + + BoolNillableArray: func(v NormalValue) (any, bool) { return v.BoolNillableArray() }, + IntNillableArray: func(v NormalValue) (any, bool) { return v.IntNillableArray() }, + FloatNillableArray: func(v NormalValue) (any, bool) { return v.FloatNillableArray() }, + StringNillableArray: func(v NormalValue) (any, bool) { return v.StringNillableArray() }, + BytesNillableArray: func(v NormalValue) (any, bool) { return v.BytesNillableArray() }, + TimeNillableArray: func(v NormalValue) (any, bool) { return v.TimeNillableArray() }, + DocumentNillableArray: func(v NormalValue) (any, bool) { return v.DocumentNillableArray() }, + + NillableBoolArray: func(v NormalValue) (any, bool) { return v.NillableBoolArray() }, + NillableIntArray: func(v NormalValue) (any, bool) { return v.NillableIntArray() }, + NillableFloatArray: func(v NormalValue) (any, bool) { return v.NillableFloatArray() }, + NillableStringArray: func(v NormalValue) (any, bool) { return v.NillableStringArray() }, + NillableBytesArray: func(v NormalValue) (any, bool) { return v.NillableBytesArray() }, + NillableTimeArray: func(v NormalValue) (any, bool) { return v.NillableTimeArray() }, + NillableDocumentArray: func(v NormalValue) (any, bool) { return v.NillableDocumentArray() }, + + NillableBoolNillableArray: func(v NormalValue) (any, bool) { return v.NillableBoolNillableArray() }, + NillableIntNillableArray: func(v NormalValue) (any, bool) { return v.NillableIntNillableArray() }, + NillableFloatNillableArray: func(v NormalValue) (any, bool) { return v.NillableFloatNillableArray() }, + NillableStringNillableArray: 
func(v NormalValue) (any, bool) { return v.NillableStringNillableArray() }, + NillableBytesNillableArray: func(v NormalValue) (any, bool) { return v.NillableBytesNillableArray() }, + NillableTimeNillableArray: func(v NormalValue) (any, bool) { return v.NillableTimeNillableArray() }, + NillableDocumentNillableArray: func(v NormalValue) (any, bool) { + return v.NillableDocumentNillableArray() + }, + } + + newMap := map[nType]func(any) NormalValue{ + BoolType: func(v any) NormalValue { return NewNormalBool(v.(bool)) }, + IntType: func(v any) NormalValue { return NewNormalInt(v.(int64)) }, + FloatType: func(v any) NormalValue { return NewNormalFloat(v.(float64)) }, + StringType: func(v any) NormalValue { return NewNormalString(v.(string)) }, + BytesType: func(v any) NormalValue { return NewNormalBytes(v.([]byte)) }, + TimeType: func(v any) NormalValue { return NewNormalTime(v.(time.Time)) }, + DocumentType: func(v any) NormalValue { return NewNormalDocument(v.(*Document)) }, + + NillableBoolType: func(v any) NormalValue { return NewNormalNillableBool(v.(immutable.Option[bool])) }, + NillableIntType: func(v any) NormalValue { return NewNormalNillableInt(v.(immutable.Option[int64])) }, + NillableFloatType: func(v any) NormalValue { return NewNormalNillableFloat(v.(immutable.Option[float64])) }, + NillableStringType: func(v any) NormalValue { return NewNormalNillableString(v.(immutable.Option[string])) }, + NillableBytesType: func(v any) NormalValue { return NewNormalNillableBytes(v.(immutable.Option[[]byte])) }, + NillableTimeType: func(v any) NormalValue { return NewNormalNillableTime(v.(immutable.Option[time.Time])) }, + NillableDocumentType: func(v any) NormalValue { return NewNormalNillableDocument(v.(immutable.Option[*Document])) }, + + BoolArray: func(v any) NormalValue { return NewNormalBoolArray(v.([]bool)) }, + IntArray: func(v any) NormalValue { return NewNormalIntArray(v.([]int64)) }, + FloatArray: func(v any) NormalValue { return NewNormalFloatArray(v.([]float64)) }, + StringArray: func(v any) NormalValue { return NewNormalStringArray(v.([]string)) }, + BytesArray: func(v any) NormalValue { return NewNormalBytesArray(v.([][]byte)) }, + TimeArray: func(v any) NormalValue { return NewNormalTimeArray(v.([]time.Time)) }, + DocumentArray: func(v any) NormalValue { return NewNormalDocumentArray(v.([]*Document)) }, + + NillableBoolArray: func(v any) NormalValue { + return NewNormalNillableBoolArray(v.([]immutable.Option[bool])) + }, + NillableIntArray: func(v any) NormalValue { + return NewNormalNillableIntArray(v.([]immutable.Option[int64])) + }, + NillableFloatArray: func(v any) NormalValue { + return NewNormalNillableFloatArray(v.([]immutable.Option[float64])) + }, + NillableStringArray: func(v any) NormalValue { + return NewNormalNillableStringArray(v.([]immutable.Option[string])) + }, + NillableBytesArray: func(v any) NormalValue { + return NewNormalNillableBytesArray(v.([]immutable.Option[[]byte])) + }, + NillableTimeArray: func(v any) NormalValue { + return NewNormalNillableTimeArray(v.([]immutable.Option[time.Time])) + }, + NillableDocumentArray: func(v any) NormalValue { + return NewNormalNillableDocumentArray(v.([]immutable.Option[*Document])) + }, + + BoolNillableArray: func(v any) NormalValue { + return NewNormalBoolNillableArray(v.(immutable.Option[[]bool])) + }, + IntNillableArray: func(v any) NormalValue { + return NewNormalIntNillableArray(v.(immutable.Option[[]int64])) + }, + FloatNillableArray: func(v any) NormalValue { + return 
NewNormalFloatNillableArray(v.(immutable.Option[[]float64])) + }, + StringNillableArray: func(v any) NormalValue { + return NewNormalStringNillableArray(v.(immutable.Option[[]string])) + }, + BytesNillableArray: func(v any) NormalValue { + return NewNormalBytesNillableArray(v.(immutable.Option[[][]byte])) + }, + TimeNillableArray: func(v any) NormalValue { + return NewNormalTimeNillableArray(v.(immutable.Option[[]time.Time])) + }, + DocumentNillableArray: func(v any) NormalValue { + return NewNormalDocumentNillableArray(v.(immutable.Option[[]*Document])) + }, + + NillableBoolNillableArray: func(v any) NormalValue { + return NewNormalNillableBoolNillableArray(v.(immutable.Option[[]immutable.Option[bool]])) + }, + NillableIntNillableArray: func(v any) NormalValue { + return NewNormalNillableIntNillableArray(v.(immutable.Option[[]immutable.Option[int64]])) + }, + NillableFloatNillableArray: func(v any) NormalValue { + return NewNormalNillableFloatNillableArray(v.(immutable.Option[[]immutable.Option[float64]])) + }, + NillableStringNillableArray: func(v any) NormalValue { + return NewNormalNillableStringNillableArray(v.(immutable.Option[[]immutable.Option[string]])) + }, + NillableBytesNillableArray: func(v any) NormalValue { + return NewNormalNillableBytesNillableArray(v.(immutable.Option[[]immutable.Option[[]byte]])) + }, + NillableTimeNillableArray: func(v any) NormalValue { + return NewNormalNillableTimeNillableArray(v.(immutable.Option[[]immutable.Option[time.Time]])) + }, + NillableDocumentNillableArray: func(v any) NormalValue { + return NewNormalNillableDocumentNillableArray(v.(immutable.Option[[]immutable.Option[*Document]])) + }, + } + + tests := []struct { + nType nType + input any + isNillable bool + isNil bool + isArray bool + }{ + { + nType: BoolType, + input: true, + }, + { + nType: IntType, + input: int64(1), + }, + { + nType: FloatType, + input: float64(1), + }, + { + nType: StringType, + input: "test", + }, + { + nType: BytesType, + input: []byte{1, 2, 3}, + }, + { + nType: TimeType, + input: time.Now(), + }, + { + nType: DocumentType, + input: &Document{}, + }, + { + nType: NillableBoolType, + input: immutable.Some(true), + isNillable: true, + }, + { + nType: NillableBoolType, + input: immutable.None[bool](), + isNil: true, + isNillable: true, + }, + { + nType: NillableIntType, + input: immutable.Some(int64(1)), + isNillable: true, + }, + { + nType: NillableIntType, + input: immutable.None[int64](), + isNil: true, + isNillable: true, + }, + { + nType: NillableFloatType, + input: immutable.Some(float64(1)), + isNillable: true, + }, + { + nType: NillableFloatType, + input: immutable.None[float64](), + isNil: true, + isNillable: true, + }, + { + nType: NillableStringType, + input: immutable.Some("test"), + isNillable: true, + }, + { + nType: NillableStringType, + input: immutable.None[string](), + isNil: true, + isNillable: true, + }, + { + nType: NillableBytesType, + input: immutable.Some([]byte{1, 2, 3}), + isNillable: true, + }, + { + nType: NillableBytesType, + input: immutable.None[[]byte](), + isNil: true, + isNillable: true, + }, + { + nType: NillableTimeType, + input: immutable.Some(time.Now()), + isNillable: true, + }, + { + nType: NillableTimeType, + input: immutable.None[time.Time](), + isNil: true, + isNillable: true, + }, + { + nType: NillableDocumentType, + input: immutable.Some(&Document{}), + isNillable: true, + }, + { + nType: NillableDocumentType, + input: immutable.None[*Document](), + isNil: true, + isNillable: true, + }, + { + nType: BoolArray, + input: 
[]bool{true, false}, + isArray: true, + }, + { + nType: IntArray, + input: []int64{1, 2, 3}, + isArray: true, + }, + { + nType: FloatArray, + input: []float64{1, 2, 3}, + isArray: true, + }, + { + nType: StringArray, + input: []string{"test", "test2"}, + isArray: true, + }, + { + nType: BytesArray, + input: [][]byte{{1, 2, 3}, {4, 5, 6}}, + isArray: true, + }, + { + nType: TimeArray, + input: []time.Time{time.Now(), time.Now()}, + isArray: true, + }, + { + nType: DocumentArray, + input: []*Document{{}, {}}, + isArray: true, + }, + { + nType: NillableBoolArray, + input: []immutable.Option[bool]{immutable.Some(true)}, + isArray: true, + }, + { + nType: NillableIntArray, + input: []immutable.Option[int64]{immutable.Some(int64(1))}, + isArray: true, + }, + { + nType: NillableFloatArray, + input: []immutable.Option[float64]{immutable.Some(float64(1))}, + isArray: true, + }, + { + nType: NillableStringArray, + input: []immutable.Option[string]{immutable.Some("test")}, + isArray: true, + }, + { + nType: NillableBytesArray, + input: []immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3})}, + isArray: true, + }, + { + nType: NillableTimeArray, + input: []immutable.Option[time.Time]{immutable.Some(time.Now())}, + isArray: true, + }, + { + nType: NillableDocumentArray, + input: []immutable.Option[*Document]{immutable.Some(&Document{})}, + isArray: true, + }, + { + nType: BoolNillableArray, + input: immutable.Some([]bool{true, false}), + isNillable: true, + isArray: true, + }, + { + nType: BoolNillableArray, + input: immutable.None[[]bool](), + isNillable: true, + isNil: true, + isArray: true, + }, + { + nType: IntNillableArray, + input: immutable.Some([]int64{1, 2, 3}), + isNillable: true, + isArray: true, + }, + { + nType: IntNillableArray, + input: immutable.None[[]int64](), + isNillable: true, + isNil: true, + isArray: true, + }, + { + nType: FloatNillableArray, + input: immutable.Some([]float64{1, 2, 3}), + isNillable: true, + isArray: true, + }, + { + nType: FloatNillableArray, + input: immutable.None[[]float64](), + isNillable: true, + isNil: true, + isArray: true, + }, + { + nType: StringNillableArray, + input: immutable.Some([]string{"test", "test2"}), + isNillable: true, + isArray: true, + }, + { + nType: StringNillableArray, + input: immutable.None[[]string](), + isNillable: true, + isNil: true, + isArray: true, + }, + { + nType: BytesNillableArray, + input: immutable.Some([][]byte{{1, 2, 3}, {4, 5, 6}}), + isNillable: true, + isArray: true, + }, + { + nType: BytesNillableArray, + input: immutable.None[[][]byte](), + isNillable: true, + isNil: true, + isArray: true, + }, + { + nType: TimeNillableArray, + input: immutable.Some([]time.Time{time.Now(), time.Now()}), + isNillable: true, + isArray: true, + }, + { + nType: TimeNillableArray, + input: immutable.None[[]time.Time](), + isNillable: true, + isNil: true, + isArray: true, + }, + { + nType: DocumentNillableArray, + input: immutable.Some([]*Document{{}, {}}), + isNillable: true, + isArray: true, + }, + { + nType: DocumentNillableArray, + input: immutable.None[[]*Document](), + isNillable: true, + isNil: true, + isArray: true, + }, + { + nType: NillableBoolNillableArray, + input: immutable.Some([]immutable.Option[bool]{immutable.Some(true)}), + isNillable: true, + isArray: true, + }, + { + nType: NillableBoolNillableArray, + input: immutable.None[[]immutable.Option[bool]](), + isNillable: true, + isNil: true, + isArray: true, + }, + { + nType: NillableIntNillableArray, + input: 
immutable.Some([]immutable.Option[int64]{immutable.Some(int64(1))}), + isNillable: true, + isArray: true, + }, + { + nType: NillableIntNillableArray, + input: immutable.None[[]immutable.Option[int64]](), + isNillable: true, + isNil: true, + isArray: true, + }, + { + nType: NillableFloatNillableArray, + input: immutable.Some([]immutable.Option[float64]{immutable.Some(float64(1))}), + isNillable: true, + isArray: true, + }, + { + nType: NillableFloatNillableArray, + input: immutable.None[[]immutable.Option[float64]](), + isNillable: true, + isNil: true, + isArray: true, + }, + { + nType: NillableStringNillableArray, + input: immutable.Some([]immutable.Option[string]{immutable.Some("test")}), + isNillable: true, + isArray: true, + }, + { + nType: NillableStringNillableArray, + input: immutable.None[[]immutable.Option[string]](), + isNillable: true, + isNil: true, + isArray: true, + }, + { + nType: NillableBytesNillableArray, + input: immutable.Some([]immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3})}), + isNillable: true, + isArray: true, + }, + { + nType: NillableBytesNillableArray, + input: immutable.None[[]immutable.Option[[]byte]](), + isNillable: true, + isNil: true, + isArray: true, + }, + { + nType: NillableTimeNillableArray, + input: immutable.Some([]immutable.Option[time.Time]{immutable.Some(time.Now())}), + isNillable: true, + isArray: true, + }, + { + nType: NillableTimeNillableArray, + input: immutable.None[[]immutable.Option[time.Time]](), + isNillable: true, + isNil: true, + isArray: true, + }, + { + nType: NillableDocumentNillableArray, + input: immutable.Some([]immutable.Option[*Document]{immutable.Some(&Document{})}), + isNillable: true, + isArray: true, + }, + } + + for _, tt := range tests { + tStr := string(tt.nType) + t.Run(tStr, func(t *testing.T) { + actual, err := NewNormalValue(tt.input) + require.NoError(t, err) + + for nType, typeAssertFunc := range typeAssertMap { + val, ok := typeAssertFunc(actual) + if nType == tt.nType { + assert.True(t, ok, tStr+"() should return true") + assert.Equal(t, tt.input, val, tStr+"() returned unexpected value") + newVal := newMap[nType](val) + assert.Equal(t, actual, newVal, "New"+tStr+"() returned unexpected NormalValue") + assert.Equal(t, extractValue(tt.input), actual.Unwrap(), + "Unwrap() returned unexpected value for "+tStr) + } else { + assert.False(t, ok, string(nType)+"() should return false for "+tStr) + } + } + + if tt.isNillable { + assert.True(t, actual.IsNillable(), "IsNillable() should return true for "+tStr) + } else { + assert.False(t, actual.IsNillable(), "IsNillable() should return false for "+tStr) + } + + if tt.isNil { + assert.True(t, actual.IsNil(), "IsNil() should return true for "+tStr) + } else { + assert.False(t, actual.IsNil(), "IsNil() should return false for "+tStr) + } + + if tt.isArray { + assert.True(t, actual.IsArray(), "IsArray() should return true for "+tStr) + } else { + assert.False(t, actual.IsArray(), "IsArray() should return false for "+tStr) + } + }) + } +} + +func TestNormalValue_InUnknownType_ReturnError(t *testing.T) { + _, err := NewNormalValue(struct{ name string }{}) + require.ErrorContains(t, err, errCanNotNormalizeValue) +} + +func TestNormalValue_NewNormalValueFromAnyArray(t *testing.T) { + now := time.Now() + doc1 := &Document{} + doc2 := &Document{} + + tests := []struct { + name string + input []any + expected NormalValue + err string + }{ + { + name: "nil input", + input: nil, + err: errCanNotNormalizeValue, + }, + { + name: "unknown element type", + input: []any{struct{ 
name string }{}}, + err: errCanNotNormalizeValue, + }, + { + name: "mixed elements type", + input: []any{1, "test", true}, + err: errCanNotNormalizeValue, + }, + { + name: "bool elements", + input: []any{true, false}, + expected: NewNormalBoolArray([]bool{true, false}), + }, + { + name: "int elements", + input: []any{int64(1), int64(2)}, + expected: NewNormalIntArray([]int64{1, 2}), + }, + { + name: "float elements", + input: []any{float64(1), float64(2)}, + expected: NewNormalFloatArray([]float64{1, 2}), + }, + { + name: "string elements", + input: []any{"test", "test2"}, + expected: NewNormalStringArray([]string{"test", "test2"}), + }, + { + name: "bytes elements", + input: []any{[]byte{1, 2, 3}, []byte{4, 5, 6}}, + expected: NewNormalBytesArray([][]byte{{1, 2, 3}, {4, 5, 6}}), + }, + { + name: "time elements", + input: []any{now, now}, + expected: NewNormalTimeArray([]time.Time{now, now}), + }, + { + name: "document elements", + input: []any{doc1, doc2}, + expected: NewNormalDocumentArray([]*Document{doc1, doc2}), + }, + { + name: "bool and nil elements", + input: []any{true, nil, false}, + expected: NewNormalNillableBoolArray( + []immutable.Option[bool]{immutable.Some(true), immutable.None[bool](), immutable.Some(false)}, + ), + }, + { + name: "int and nil elements", + input: []any{1, nil, 2}, + expected: NewNormalNillableIntArray( + []immutable.Option[int64]{immutable.Some(int64(1)), immutable.None[int64](), immutable.Some(int64(2))}, + ), + }, + { + name: "float and nil elements", + input: []any{1.0, nil, 2.0}, + expected: NewNormalNillableFloatArray( + []immutable.Option[float64]{immutable.Some(1.0), immutable.None[float64](), immutable.Some(2.0)}, + ), + }, + { + name: "string and nil elements", + input: []any{"test", nil, "test2"}, + expected: NewNormalNillableStringArray( + []immutable.Option[string]{immutable.Some("test"), immutable.None[string](), immutable.Some("test2")}, + ), + }, + { + name: "bytes and nil elements", + input: []any{[]byte{1, 2, 3}, nil, []byte{4, 5, 6}}, + expected: NewNormalNillableBytesArray( + []immutable.Option[[]byte]{ + immutable.Some([]byte{1, 2, 3}), + immutable.None[[]byte](), + immutable.Some([]byte{4, 5, 6}), + }, + ), + }, + { + name: "time and nil elements", + input: []any{now, nil, now}, + expected: NewNormalNillableTimeArray( + []immutable.Option[time.Time]{immutable.Some(now), immutable.None[time.Time](), immutable.Some(now)}, + ), + }, + { + name: "document and nil elements", + input: []any{doc1, nil, doc2}, + expected: NewNormalNillableDocumentArray( + []immutable.Option[*Document]{immutable.Some(doc1), immutable.None[*Document](), immutable.Some(doc2)}, + ), + }, + { + name: "mixed int elements", + input: []any{int8(1), int16(2), int32(3), int64(4), int(5), uint8(6), uint16(7), uint32(8), + uint64(9), uint(10)}, + expected: NewNormalIntArray([]int64{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}), + }, + { + name: "mixed float elements", + input: []any{float32(1.5), float64(2.2)}, + expected: NewNormalFloatArray([]float64{1.5, 2.2}), + }, + { + name: "mixed number elements", + input: []any{int8(1), int16(2), int32(3), int64(4), int(5), uint8(6), uint16(7), uint32(8), + uint64(9), uint(10), float32(1.5), float64(2.2)}, + expected: NewNormalFloatArray([]float64{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 1.5, 2.2}), + }, + { + name: "mixed int and nil elements", + input: []any{int8(1), nil, int16(2), int32(3), int64(4), int(5), uint8(6), uint16(7), uint32(8), + uint64(9), nil, uint(10)}, + expected: NewNormalNillableIntArray( + 
[]immutable.Option[int64]{immutable.Some(int64(1)), immutable.None[int64](), immutable.Some(int64(2)), + immutable.Some(int64(3)), immutable.Some(int64(4)), immutable.Some(int64(5)), immutable.Some(int64(6)), + immutable.Some(int64(7)), immutable.Some(int64(8)), immutable.Some(int64(9)), immutable.None[int64](), + immutable.Some(int64(10))}, + ), + }, + { + name: "mixed float and nil elements", + input: []any{float32(1.5), nil, float64(2.2)}, + expected: NewNormalNillableFloatArray( + []immutable.Option[float64]{immutable.Some(1.5), immutable.None[float64](), immutable.Some(2.2)}, + ), + }, + { + name: "mixed number and nil elements", + input: []any{int8(1), nil, int16(2), int32(3), int64(4), int(5), uint8(6), uint16(7), uint32(8), + uint64(9), nil, uint(10), float32(1.5), nil, float64(2.2)}, + expected: NewNormalNillableFloatArray( + []immutable.Option[float64]{ + immutable.Some(1.0), immutable.None[float64](), immutable.Some(2.0), immutable.Some(3.0), + immutable.Some(4.0), immutable.Some(5.0), immutable.Some(6.0), immutable.Some(7.0), + immutable.Some(8.0), immutable.Some(9.0), immutable.None[float64](), immutable.Some(10.0), + immutable.Some(1.5), immutable.None[float64](), immutable.Some(2.2)}, + ), + }, + } + + for _, tt := range tests { + tStr := string(tt.name) + t.Run(tStr, func(t *testing.T) { + actual, err := NewNormalValue(tt.input) + if tt.err != "" { + require.ErrorContains(t, err, tt.err) + return + } + + assert.Equal(t, tt.expected, actual) + }) + } +} + +func TestNormalValue_NewNormalInt(t *testing.T) { + i64 := int64(2) + v := NewNormalInt(i64) + getInt := func(v NormalValue) int64 { i, _ := v.Int(); return i } + + assert.Equal(t, i64, getInt(v)) + + v = NewNormalInt(float32(2.5)) + assert.Equal(t, i64, getInt(v)) + + v = NewNormalInt(float64(2.5)) + assert.Equal(t, i64, getInt(v)) + + v = NewNormalInt(int8(2)) + assert.Equal(t, i64, getInt(v)) + + v = NewNormalInt(int16(2)) + assert.Equal(t, i64, getInt(v)) + + v = NewNormalInt(int32(2)) + assert.Equal(t, i64, getInt(v)) + + v = NewNormalInt(int(2)) + assert.Equal(t, i64, getInt(v)) + + v = NewNormalInt(uint8(2)) + assert.Equal(t, i64, getInt(v)) + + v = NewNormalInt(uint16(2)) + assert.Equal(t, i64, getInt(v)) + + v = NewNormalInt(uint32(2)) + assert.Equal(t, i64, getInt(v)) + + v = NewNormalInt(uint64(2)) + assert.Equal(t, i64, getInt(v)) + + v = NewNormalInt(uint(2)) + assert.Equal(t, i64, getInt(v)) +} + +func TestNormalValue_NewNormalFloat(t *testing.T) { + f64Frac := float64(2.5) + f64 := float64(2) + + getFloat := func(v NormalValue) float64 { f, _ := v.Float(); return f } + + v := NewNormalFloat(f64Frac) + assert.Equal(t, f64Frac, getFloat(v)) + + v = NewNormalFloat(float32(2.5)) + assert.Equal(t, f64Frac, getFloat(v)) + + v = NewNormalFloat(int8(2)) + assert.Equal(t, f64, getFloat(v)) + + v = NewNormalFloat(int16(2)) + assert.Equal(t, f64, getFloat(v)) + + v = NewNormalFloat(int32(2)) + assert.Equal(t, f64, getFloat(v)) + + v = NewNormalFloat(int64(2)) + assert.Equal(t, f64, getFloat(v)) + + v = NewNormalFloat(int(2)) + assert.Equal(t, f64, getFloat(v)) + + v = NewNormalFloat(uint8(2)) + assert.Equal(t, f64, getFloat(v)) + + v = NewNormalFloat(uint16(2)) + assert.Equal(t, f64, getFloat(v)) + + v = NewNormalFloat(uint32(2)) + assert.Equal(t, f64, getFloat(v)) + + v = NewNormalFloat(uint64(2)) + assert.Equal(t, f64, getFloat(v)) + + v = NewNormalFloat(uint(2)) + assert.Equal(t, f64, getFloat(v)) +} + +func TestNormalValue_NewNormalString(t *testing.T) { + strInput := "str" + + getString := func(v NormalValue) 
string { s, _ := v.String(); return s } + + v := NewNormalString(strInput) + assert.Equal(t, strInput, getString(v)) + + v = NewNormalString([]byte{'s', 't', 'r'}) + assert.Equal(t, strInput, getString(v)) +} + +func TestNormalValue_NewNormalBytes(t *testing.T) { + bytesInput := []byte("str") + + getBytes := func(v NormalValue) []byte { b, _ := v.Bytes(); return b } + + v := NewNormalBytes(bytesInput) + assert.Equal(t, bytesInput, getBytes(v)) + + v = NewNormalBytes("str") + assert.Equal(t, bytesInput, getBytes(v)) +} + +func TestNormalValue_NewNormalIntArray(t *testing.T) { + i64Input := []int64{2} + + getIntArray := func(v NormalValue) []int64 { i, _ := v.IntArray(); return i } + + v := NewNormalIntArray(i64Input) + assert.Equal(t, i64Input, getIntArray(v)) + + v = NewNormalIntArray([]float32{2.5}) + assert.Equal(t, i64Input, getIntArray(v)) + + v = NewNormalIntArray([]int8{2}) + assert.Equal(t, i64Input, getIntArray(v)) + + v = NewNormalIntArray([]int16{2}) + assert.Equal(t, i64Input, getIntArray(v)) + + v = NewNormalIntArray([]int32{2}) + assert.Equal(t, i64Input, getIntArray(v)) + + v = NewNormalIntArray([]int64{2}) + assert.Equal(t, i64Input, getIntArray(v)) + + v = NewNormalIntArray([]int{2}) + assert.Equal(t, i64Input, getIntArray(v)) + + v = NewNormalIntArray([]uint8{2}) + assert.Equal(t, i64Input, getIntArray(v)) + + v = NewNormalIntArray([]uint16{2}) + assert.Equal(t, i64Input, getIntArray(v)) + + v = NewNormalIntArray([]uint32{2}) + assert.Equal(t, i64Input, getIntArray(v)) + + v = NewNormalIntArray([]uint64{2}) + assert.Equal(t, i64Input, getIntArray(v)) + + v = NewNormalIntArray([]uint{2}) + assert.Equal(t, i64Input, getIntArray(v)) +} + +func TestNormalValue_NewNormalFloatArray(t *testing.T) { + f64InputFrac := []float64{2.5} + f64Input := []float64{2.0} + + getFloatArray := func(v NormalValue) []float64 { f, _ := v.FloatArray(); return f } + + v := NewNormalFloatArray(f64InputFrac) + assert.Equal(t, f64InputFrac, getFloatArray(v)) + + v = NewNormalFloatArray([]float32{2.5}) + assert.Equal(t, f64InputFrac, getFloatArray(v)) + + v = NewNormalFloatArray([]int8{2}) + assert.Equal(t, f64Input, getFloatArray(v)) + + v = NewNormalFloatArray([]int16{2}) + assert.Equal(t, f64Input, getFloatArray(v)) + + v = NewNormalFloatArray([]int32{2}) + assert.Equal(t, f64Input, getFloatArray(v)) + + v = NewNormalFloatArray([]int64{2}) + assert.Equal(t, f64Input, getFloatArray(v)) + + v = NewNormalFloatArray([]int{2}) + assert.Equal(t, f64Input, getFloatArray(v)) + + v = NewNormalFloatArray([]uint8{2}) + assert.Equal(t, f64Input, getFloatArray(v)) + + v = NewNormalFloatArray([]uint16{2}) + assert.Equal(t, f64Input, getFloatArray(v)) + + v = NewNormalFloatArray([]uint32{2}) + assert.Equal(t, f64Input, getFloatArray(v)) + + v = NewNormalFloatArray([]uint64{2}) + assert.Equal(t, f64Input, getFloatArray(v)) + + v = NewNormalFloatArray([]uint{2}) + assert.Equal(t, f64Input, getFloatArray(v)) +} + +func TestNormalValue_NewNormalStringArray(t *testing.T) { + strInput := []string{"str"} + + getStringArray := func(v NormalValue) []string { s, _ := v.StringArray(); return s } + + v := NewNormalStringArray(strInput) + assert.Equal(t, strInput, getStringArray(v)) + + v = NewNormalStringArray([][]byte{{'s', 't', 'r'}}) + assert.Equal(t, strInput, getStringArray(v)) +} + +func TestNormalValue_NewNormalBytesArray(t *testing.T) { + bytesInput := [][]byte{[]byte("str")} + + getBytesArray := func(v NormalValue) [][]byte { b, _ := v.BytesArray(); return b } + + v := NewNormalBytesArray(bytesInput) + 
assert.Equal(t, bytesInput, getBytesArray(v)) + + v = NewNormalBytesArray([]string{"str"}) + assert.Equal(t, bytesInput, getBytesArray(v)) +} + +func TestNormalValue_NewNormalNillableFloatArray(t *testing.T) { + f64InputFrac := []immutable.Option[float64]{immutable.Some(2.5)} + f64Input := []immutable.Option[float64]{immutable.Some(2.0)} + + getNillableFloatArray := func(v NormalValue) []immutable.Option[float64] { f, _ := v.NillableFloatArray(); return f } + + v := NewNormalNillableFloatArray(f64InputFrac) + assert.Equal(t, f64InputFrac, getNillableFloatArray(v)) + + v = NewNormalNillableFloatArray([]immutable.Option[float32]{immutable.Some[float32](2.5)}) + assert.Equal(t, f64InputFrac, getNillableFloatArray(v)) + + v = NewNormalNillableFloatArray([]immutable.Option[int8]{immutable.Some[int8](2)}) + assert.Equal(t, f64Input, getNillableFloatArray(v)) + + v = NewNormalNillableFloatArray([]immutable.Option[int16]{immutable.Some[int16](2)}) + assert.Equal(t, f64Input, getNillableFloatArray(v)) + + v = NewNormalNillableFloatArray([]immutable.Option[int32]{immutable.Some[int32](2)}) + assert.Equal(t, f64Input, getNillableFloatArray(v)) + + v = NewNormalNillableFloatArray([]immutable.Option[int64]{immutable.Some[int64](2)}) + assert.Equal(t, f64Input, getNillableFloatArray(v)) + + v = NewNormalNillableFloatArray([]immutable.Option[int]{immutable.Some[int](2)}) + assert.Equal(t, f64Input, getNillableFloatArray(v)) + + v = NewNormalNillableFloatArray([]immutable.Option[uint8]{immutable.Some[uint8](2)}) + assert.Equal(t, f64Input, getNillableFloatArray(v)) + + v = NewNormalNillableFloatArray([]immutable.Option[uint16]{immutable.Some[uint16](2)}) + assert.Equal(t, f64Input, getNillableFloatArray(v)) + + v = NewNormalNillableFloatArray([]immutable.Option[uint32]{immutable.Some[uint32](2)}) + assert.Equal(t, f64Input, getNillableFloatArray(v)) + + v = NewNormalNillableFloatArray([]immutable.Option[uint64]{immutable.Some[uint64](2)}) + assert.Equal(t, f64Input, getNillableFloatArray(v)) + + v = NewNormalNillableFloatArray([]immutable.Option[uint]{immutable.Some[uint](2)}) + assert.Equal(t, f64Input, getNillableFloatArray(v)) +} + +func TestNormalValue_NewNormalNillableIntArray(t *testing.T) { + i64Input := []immutable.Option[int64]{immutable.Some[int64](2)} + + getNillableIntArray := func(v NormalValue) []immutable.Option[int64] { i, _ := v.NillableIntArray(); return i } + + v := NewNormalNillableIntArray(i64Input) + assert.Equal(t, i64Input, getNillableIntArray(v)) + + v = NewNormalNillableIntArray([]immutable.Option[float32]{immutable.Some[float32](2.5)}) + assert.Equal(t, i64Input, getNillableIntArray(v)) + + v = NewNormalNillableIntArray([]immutable.Option[float64]{immutable.Some[float64](2.5)}) + assert.Equal(t, i64Input, getNillableIntArray(v)) + + v = NewNormalNillableIntArray([]immutable.Option[int8]{immutable.Some[int8](2)}) + assert.Equal(t, i64Input, getNillableIntArray(v)) + + v = NewNormalNillableIntArray([]immutable.Option[int16]{immutable.Some[int16](2)}) + assert.Equal(t, i64Input, getNillableIntArray(v)) + + v = NewNormalNillableIntArray([]immutable.Option[int32]{immutable.Some[int32](2)}) + assert.Equal(t, i64Input, getNillableIntArray(v)) + + v = NewNormalNillableIntArray([]immutable.Option[int]{immutable.Some[int](2)}) + assert.Equal(t, i64Input, getNillableIntArray(v)) + + v = NewNormalNillableIntArray([]immutable.Option[uint8]{immutable.Some[uint8](2)}) + assert.Equal(t, i64Input, getNillableIntArray(v)) + + v = 
NewNormalNillableIntArray([]immutable.Option[uint16]{immutable.Some[uint16](2)}) + assert.Equal(t, i64Input, getNillableIntArray(v)) + + v = NewNormalNillableIntArray([]immutable.Option[uint32]{immutable.Some[uint32](2)}) + assert.Equal(t, i64Input, getNillableIntArray(v)) + + v = NewNormalNillableIntArray([]immutable.Option[uint64]{immutable.Some[uint64](2)}) + assert.Equal(t, i64Input, getNillableIntArray(v)) + + v = NewNormalNillableIntArray([]immutable.Option[uint]{immutable.Some[uint](2)}) + assert.Equal(t, i64Input, getNillableIntArray(v)) +} + +func TestNormalValue_NewNormalNillableStringArray(t *testing.T) { + strInput := []immutable.Option[string]{immutable.Some("str")} + + getNillableStringArray := func(v NormalValue) []immutable.Option[string] { s, _ := v.NillableStringArray(); return s } + + v := NewNormalNillableStringArray(strInput) + assert.Equal(t, strInput, getNillableStringArray(v)) + + v = NewNormalNillableStringArray([]immutable.Option[[]byte]{immutable.Some[[]byte]([]byte{'s', 't', 'r'})}) + assert.Equal(t, strInput, getNillableStringArray(v)) +} + +func TestNormalValue_NewNormalNillableBytesArray(t *testing.T) { + bytesInput := []immutable.Option[[]byte]{immutable.Some[[]byte]([]byte("str"))} + + getNillableBytesArray := func(v NormalValue) []immutable.Option[[]byte] { b, _ := v.NillableBytesArray(); return b } + + v := NewNormalNillableBytesArray(bytesInput) + assert.Equal(t, bytesInput, getNillableBytesArray(v)) + + v = NewNormalNillableBytesArray([]immutable.Option[string]{immutable.Some("str")}) + assert.Equal(t, bytesInput, getNillableBytesArray(v)) +} + +func TestNormalValue_NewNormalIntArrayNillable(t *testing.T) { + i64Input := immutable.Some([]int64{2}) + + getIntNillableArray := func(v NormalValue) immutable.Option[[]int64] { i, _ := v.IntNillableArray(); return i } + + v := NewNormalIntNillableArray(i64Input) + assert.Equal(t, i64Input, getIntNillableArray(v)) + + v = NewNormalIntNillableArray(immutable.Some([]float32{2.5})) + assert.Equal(t, i64Input, getIntNillableArray(v)) + + v = NewNormalIntNillableArray(immutable.Some([]float64{2.5})) + assert.Equal(t, i64Input, getIntNillableArray(v)) + + v = NewNormalIntNillableArray(immutable.Some([]int8{2})) + assert.Equal(t, i64Input, getIntNillableArray(v)) + + v = NewNormalIntNillableArray(immutable.Some([]int16{2})) + assert.Equal(t, i64Input, getIntNillableArray(v)) + + v = NewNormalIntNillableArray(immutable.Some([]int32{2})) + assert.Equal(t, i64Input, getIntNillableArray(v)) + + v = NewNormalIntNillableArray(immutable.Some([]int{2})) + assert.Equal(t, i64Input, getIntNillableArray(v)) + + v = NewNormalIntNillableArray(immutable.Some([]uint8{2})) + assert.Equal(t, i64Input, getIntNillableArray(v)) + + v = NewNormalIntNillableArray(immutable.Some([]uint16{2})) + assert.Equal(t, i64Input, getIntNillableArray(v)) + + v = NewNormalIntNillableArray(immutable.Some([]uint32{2})) + assert.Equal(t, i64Input, getIntNillableArray(v)) + + v = NewNormalIntNillableArray(immutable.Some([]uint64{2})) + assert.Equal(t, i64Input, getIntNillableArray(v)) + + v = NewNormalIntNillableArray(immutable.Some([]uint{2})) + assert.Equal(t, i64Input, getIntNillableArray(v)) +} + +func TestNormalValue_NewNormalFloatNillableArray(t *testing.T) { + f64InputFrac := immutable.Some([]float64{2.5}) + f64Input := immutable.Some([]float64{2.0}) + + getFloatNillableArray := func(v NormalValue) immutable.Option[[]float64] { f, _ := v.FloatNillableArray(); return f } + + v := NewNormalFloatNillableArray(f64InputFrac) + assert.Equal(t, 
f64InputFrac, getFloatNillableArray(v)) + + v = NewNormalFloatNillableArray(immutable.Some([]float32{2.5})) + assert.Equal(t, f64InputFrac, getFloatNillableArray(v)) + + v = NewNormalFloatNillableArray(immutable.Some([]int8{2})) + assert.Equal(t, f64Input, getFloatNillableArray(v)) + + v = NewNormalFloatNillableArray(immutable.Some([]int16{2})) + assert.Equal(t, f64Input, getFloatNillableArray(v)) + + v = NewNormalFloatNillableArray(immutable.Some([]int32{2})) + assert.Equal(t, f64Input, getFloatNillableArray(v)) + + v = NewNormalFloatNillableArray(immutable.Some([]int64{2})) + assert.Equal(t, f64Input, getFloatNillableArray(v)) + + v = NewNormalFloatNillableArray(immutable.Some([]int{2})) + assert.Equal(t, f64Input, getFloatNillableArray(v)) + + v = NewNormalFloatNillableArray(immutable.Some([]uint8{2})) + assert.Equal(t, f64Input, getFloatNillableArray(v)) + + v = NewNormalFloatNillableArray(immutable.Some([]uint16{2})) + assert.Equal(t, f64Input, getFloatNillableArray(v)) + + v = NewNormalFloatNillableArray(immutable.Some([]uint32{2})) + assert.Equal(t, f64Input, getFloatNillableArray(v)) + + v = NewNormalFloatNillableArray(immutable.Some([]uint64{2})) + assert.Equal(t, f64Input, getFloatNillableArray(v)) + + v = NewNormalFloatNillableArray(immutable.Some([]uint{2})) + assert.Equal(t, f64Input, getFloatNillableArray(v)) +} + +func TestNormalValue_NewNormalStringNillableArray(t *testing.T) { + strInput := immutable.Some([]string{"str"}) + + getStringNillableArray := func(v NormalValue) immutable.Option[[]string] { s, _ := v.StringNillableArray(); return s } + + v := NewNormalStringNillableArray(strInput) + assert.Equal(t, strInput, getStringNillableArray(v)) + + v = NewNormalStringNillableArray(immutable.Some([][]byte{{'s', 't', 'r'}})) + assert.Equal(t, strInput, getStringNillableArray(v)) +} + +func TestNormalValue_NewNormalBytesNillableArray(t *testing.T) { + bytesInput := immutable.Some([][]byte{{'s', 't', 'r'}}) + + getBytesNillableArray := func(v NormalValue) immutable.Option[[][]byte] { s, _ := v.BytesNillableArray(); return s } + + v := NewNormalBytesNillableArray(immutable.Some([]string{"str"})) + assert.Equal(t, bytesInput, getBytesNillableArray(v)) + + v = NewNormalBytesNillableArray(bytesInput) + assert.Equal(t, bytesInput, getBytesNillableArray(v)) +} + +func TestNormalValue_NewNormalNillableIntNillableArray(t *testing.T) { + i64Input := immutable.Some([]immutable.Option[int64]{immutable.Some(int64(2))}) + + getNillableIntNillableArray := func(v NormalValue) immutable.Option[[]immutable.Option[int64]] { + i, _ := v.NillableIntNillableArray() + return i + } + + v := NewNormalNillableIntNillableArray(i64Input) + assert.Equal(t, i64Input, getNillableIntNillableArray(v)) + + v = NewNormalNillableIntNillableArray(immutable.Some([]immutable.Option[float32]{immutable.Some(float32(2.5))})) + assert.Equal(t, i64Input, getNillableIntNillableArray(v)) + + v = NewNormalNillableIntNillableArray(immutable.Some([]immutable.Option[float64]{immutable.Some(2.5)})) + assert.Equal(t, i64Input, getNillableIntNillableArray(v)) + + v = NewNormalNillableIntNillableArray(immutable.Some([]immutable.Option[int8]{immutable.Some(int8(2))})) + assert.Equal(t, i64Input, getNillableIntNillableArray(v)) + + v = NewNormalNillableIntNillableArray(immutable.Some([]immutable.Option[int16]{immutable.Some(int16(2))})) + assert.Equal(t, i64Input, getNillableIntNillableArray(v)) + + v = NewNormalNillableIntNillableArray(immutable.Some([]immutable.Option[int32]{immutable.Some(int32(2))})) + assert.Equal(t, i64Input, 
getNillableIntNillableArray(v)) + + v = NewNormalNillableIntNillableArray(immutable.Some([]immutable.Option[int]{immutable.Some(int(2))})) + assert.Equal(t, i64Input, getNillableIntNillableArray(v)) + + v = NewNormalNillableIntNillableArray(immutable.Some([]immutable.Option[uint8]{immutable.Some(uint8(2))})) + assert.Equal(t, i64Input, getNillableIntNillableArray(v)) + + v = NewNormalNillableIntNillableArray(immutable.Some([]immutable.Option[uint16]{immutable.Some(uint16(2))})) + assert.Equal(t, i64Input, getNillableIntNillableArray(v)) + + v = NewNormalNillableIntNillableArray(immutable.Some([]immutable.Option[uint32]{immutable.Some(uint32(2))})) + assert.Equal(t, i64Input, getNillableIntNillableArray(v)) + + v = NewNormalNillableIntNillableArray(immutable.Some([]immutable.Option[uint64]{immutable.Some(uint64(2))})) + assert.Equal(t, i64Input, getNillableIntNillableArray(v)) + + v = NewNormalNillableIntNillableArray(immutable.Some([]immutable.Option[uint]{immutable.Some(uint(2))})) + assert.Equal(t, i64Input, getNillableIntNillableArray(v)) +} + +func TestNormalValue_NewNormalNillableFloatNillableArray(t *testing.T) { + f64InputFrac := immutable.Some([]immutable.Option[float64]{immutable.Some(2.5)}) + f64Input := immutable.Some([]immutable.Option[float64]{immutable.Some(2.0)}) + + getNillableFloatNillableArray := func(v NormalValue) immutable.Option[[]immutable.Option[float64]] { + f, _ := v.NillableFloatNillableArray() + return f + } + + v := NewNormalNillableFloatNillableArray(f64InputFrac) + assert.Equal(t, f64InputFrac, getNillableFloatNillableArray(v)) + + v = NewNormalNillableFloatNillableArray(immutable.Some([]immutable.Option[float32]{immutable.Some(float32(2.5))})) + assert.Equal(t, f64InputFrac, getNillableFloatNillableArray(v)) + + v = NewNormalNillableFloatNillableArray(immutable.Some([]immutable.Option[int8]{immutable.Some(int8(2))})) + assert.Equal(t, f64Input, getNillableFloatNillableArray(v)) + + v = NewNormalNillableFloatNillableArray(immutable.Some([]immutable.Option[int16]{immutable.Some(int16(2))})) + assert.Equal(t, f64Input, getNillableFloatNillableArray(v)) + + v = NewNormalNillableFloatNillableArray(immutable.Some([]immutable.Option[int32]{immutable.Some(int32(2))})) + assert.Equal(t, f64Input, getNillableFloatNillableArray(v)) + + v = NewNormalNillableFloatNillableArray(immutable.Some([]immutable.Option[int64]{immutable.Some(int64(2))})) + assert.Equal(t, f64Input, getNillableFloatNillableArray(v)) + + v = NewNormalNillableFloatNillableArray(immutable.Some([]immutable.Option[int]{immutable.Some(2)})) + assert.Equal(t, f64Input, getNillableFloatNillableArray(v)) + + v = NewNormalNillableFloatNillableArray(immutable.Some([]immutable.Option[uint8]{immutable.Some(uint8(2))})) + assert.Equal(t, f64Input, getNillableFloatNillableArray(v)) + + v = NewNormalNillableFloatNillableArray(immutable.Some([]immutable.Option[uint16]{immutable.Some(uint16(2))})) + assert.Equal(t, f64Input, getNillableFloatNillableArray(v)) + + v = NewNormalNillableFloatNillableArray(immutable.Some([]immutable.Option[uint32]{immutable.Some(uint32(2))})) + assert.Equal(t, f64Input, getNillableFloatNillableArray(v)) + + v = NewNormalNillableFloatNillableArray(immutable.Some([]immutable.Option[uint64]{immutable.Some(uint64(2))})) + assert.Equal(t, f64Input, getNillableFloatNillableArray(v)) + + v = NewNormalNillableFloatNillableArray(immutable.Some([]immutable.Option[uint]{immutable.Some(uint(2))})) + assert.Equal(t, f64Input, getNillableFloatNillableArray(v)) +} + +func 
TestNormalValue_NewNormalNillableStringNillableArray(t *testing.T) { + strInput := immutable.Some([]immutable.Option[string]{immutable.Some("str")}) + + getNillableStringNillableArray := func(v NormalValue) immutable.Option[[]immutable.Option[string]] { + s, _ := v.NillableStringNillableArray() + return s + } + + v := NewNormalNillableStringNillableArray(strInput) + assert.Equal(t, strInput, getNillableStringNillableArray(v)) + + bytesInput := immutable.Some([]immutable.Option[[]byte]{immutable.Some([]byte{'s', 't', 'r'})}) + v = NewNormalNillableStringNillableArray(bytesInput) + assert.Equal(t, strInput, getNillableStringNillableArray(v)) +} + +func TestNormalValue_NewNormalNillableBytesNillableArray(t *testing.T) { + bytesInput := immutable.Some([]immutable.Option[[]byte]{immutable.Some([]byte{'s', 't', 'r'})}) + + getNillableBytesNillableArray := func(v NormalValue) immutable.Option[[]immutable.Option[[]byte]] { + s, _ := v.NillableBytesNillableArray() + return s + } + + v := NewNormalNillableBytesNillableArray(bytesInput) + assert.Equal(t, bytesInput, getNillableBytesNillableArray(v)) + + strInput := immutable.Some([]immutable.Option[string]{immutable.Some("str")}) + v = NewNormalNillableBytesNillableArray(strInput) + assert.Equal(t, bytesInput, getNillableBytesNillableArray(v)) +} + +func TestNormalValue_NewNormalNil(t *testing.T) { + fieldKinds := []FieldKind{} + for _, kind := range FieldKindStringToEnumMapping { + fieldKinds = append(fieldKinds, kind) + } + fieldKinds = append(fieldKinds, ObjectKind("Object")) + fieldKinds = append(fieldKinds, ObjectArrayKind("ObjectArr")) + + for _, kind := range fieldKinds { + if kind.IsNillable() { + v, err := NewNormalNil(kind) + require.NoError(t, err) + + assert.True(t, v.IsNil()) + } else { + _, err := NewNormalNil(kind) + require.Error(t, err) + } + } +} + +func TestNormalValue_ToArrayOfNormalValues(t *testing.T) { + now := time.Now() + doc1 := &Document{} + doc2 := &Document{} + + normalNil, err := NewNormalNil(FieldKind_NILLABLE_INT) + require.NoError(t, err) + + tests := []struct { + name string + input NormalValue + expected []NormalValue + err string + }{ + { + name: "nil", + input: normalNil, + }, + { + name: "not array", + input: NewNormalInt(1), + err: errCanNotTurnNormalValueIntoArray, + }, + { + name: "bool elements", + input: NewNormalBoolArray([]bool{true, false}), + expected: []NormalValue{NewNormalBool(true), NewNormalBool(false)}, + }, + { + name: "int elements", + input: NewNormalIntArray([]int64{1, 2}), + expected: []NormalValue{NewNormalInt(1), NewNormalInt(2)}, + }, + { + name: "float elements", + input: NewNormalFloatArray([]float64{1.0, 2.0}), + expected: []NormalValue{NewNormalFloat(1.0), NewNormalFloat(2.0)}, + }, + { + name: "string elements", + input: NewNormalStringArray([]string{"test", "test2"}), + expected: []NormalValue{NewNormalString("test"), NewNormalString("test2")}, + }, + { + name: "bytes elements", + input: NewNormalBytesArray([][]byte{{1, 2, 3}, {4, 5, 6}}), + expected: []NormalValue{NewNormalBytes([]byte{1, 2, 3}), NewNormalBytes([]byte{4, 5, 6})}, + }, + { + name: "time elements", + input: NewNormalTimeArray([]time.Time{now, now}), + expected: []NormalValue{NewNormalTime(now), NewNormalTime(now)}, + }, + { + name: "document elements", + input: NewNormalDocumentArray([]*Document{doc1, doc2}), + expected: []NormalValue{NewNormalDocument(doc1), NewNormalDocument(doc2)}, + }, + { + name: "nillable bool elements", + input: NewNormalNillableBoolArray([]immutable.Option[bool]{ + immutable.Some(true), 
immutable.Some(false)}), + expected: []NormalValue{ + NewNormalNillableBool(immutable.Some(true)), + NewNormalNillableBool(immutable.Some(false)), + }, + }, + { + name: "nillable int elements", + input: NewNormalNillableIntArray([]immutable.Option[int64]{ + immutable.Some(int64(1)), immutable.Some(int64(2))}), + expected: []NormalValue{ + NewNormalNillableInt(immutable.Some(int64(1))), + NewNormalNillableInt(immutable.Some(int64(2))), + }, + }, + { + name: "nillable float elements", + input: NewNormalNillableFloatArray([]immutable.Option[float64]{ + immutable.Some(1.0), immutable.Some(2.0)}), + expected: []NormalValue{ + NewNormalNillableFloat(immutable.Some(1.0)), + NewNormalNillableFloat(immutable.Some(2.0)), + }, + }, + { + name: "nillable string elements", + input: NewNormalNillableStringArray([]immutable.Option[string]{ + immutable.Some("test"), immutable.Some("test2")}), + expected: []NormalValue{ + NewNormalNillableString(immutable.Some("test")), + NewNormalNillableString(immutable.Some("test2")), + }, + }, + { + name: "nillable bytes elements", + input: NewNormalNillableBytesArray([]immutable.Option[[]byte]{ + immutable.Some([]byte{1, 2, 3}), immutable.Some([]byte{4, 5, 6})}), + expected: []NormalValue{ + NewNormalNillableBytes(immutable.Some([]byte{1, 2, 3})), + NewNormalNillableBytes(immutable.Some([]byte{4, 5, 6})), + }, + }, + { + name: "nillable time elements", + input: NewNormalNillableTimeArray([]immutable.Option[time.Time]{ + immutable.Some(now), immutable.Some(now)}), + expected: []NormalValue{ + NewNormalNillableTime(immutable.Some(now)), + NewNormalNillableTime(immutable.Some(now)), + }, + }, + { + name: "nillable document elements", + input: NewNormalNillableDocumentArray([]immutable.Option[*Document]{ + immutable.Some(doc1), immutable.Some(doc2)}), + expected: []NormalValue{ + NewNormalNillableDocument(immutable.Some(doc1)), + NewNormalNillableDocument(immutable.Some(doc2)), + }, + }, + { + name: "nillable array of bool elements", + input: NewNormalBoolNillableArray(immutable.Some([]bool{true})), + expected: []NormalValue{NewNormalBool(true)}, + }, + { + name: "nillable array of int elements", + input: NewNormalIntNillableArray(immutable.Some([]int64{1})), + expected: []NormalValue{NewNormalInt(1)}, + }, + { + name: "nillable array of float elements", + input: NewNormalFloatNillableArray(immutable.Some([]float64{1.0})), + expected: []NormalValue{NewNormalFloat(1.0)}, + }, + { + name: "nillable array of string elements", + input: NewNormalStringNillableArray(immutable.Some([]string{"test"})), + expected: []NormalValue{NewNormalString("test")}, + }, + { + name: "nillable array of bytes elements", + input: NewNormalBytesNillableArray(immutable.Some([][]byte{{1, 2, 3}})), + expected: []NormalValue{NewNormalBytes([]byte{1, 2, 3})}, + }, + { + name: "nillable array of time elements", + input: NewNormalTimeNillableArray(immutable.Some([]time.Time{now})), + expected: []NormalValue{NewNormalTime(now)}, + }, + { + name: "nillable array of document elements", + input: NewNormalDocumentNillableArray(immutable.Some([]*Document{doc1})), + expected: []NormalValue{NewNormalDocument(doc1)}, + }, + { + name: "nillable array of nillable bool elements", + input: NewNormalNillableBoolNillableArray( + immutable.Some([]immutable.Option[bool]{immutable.Some(true)})), + expected: []NormalValue{NewNormalNillableBool(immutable.Some(true))}, + }, + { + name: "nillable array of nillable int elements", + input: NewNormalNillableIntNillableArray( + 
immutable.Some([]immutable.Option[int64]{immutable.Some(int64(1))})), + expected: []NormalValue{NewNormalNillableInt(immutable.Some(int64(1)))}, + }, + { + name: "nillable array of nillable float elements", + input: NewNormalNillableFloatNillableArray( + immutable.Some([]immutable.Option[float64]{immutable.Some(1.0)})), + expected: []NormalValue{NewNormalNillableFloat(immutable.Some(1.0))}, + }, + { + name: "nillable array of nillable string elements", + input: NewNormalNillableStringNillableArray( + immutable.Some([]immutable.Option[string]{immutable.Some("test")})), + expected: []NormalValue{NewNormalNillableString(immutable.Some("test"))}, + }, + { + name: "nillable array of nillable bytes elements", + input: NewNormalNillableBytesNillableArray( + immutable.Some([]immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3})})), + expected: []NormalValue{NewNormalNillableBytes(immutable.Some([]byte{1, 2, 3}))}, + }, + { + name: "nillable array of nillable time elements", + input: NewNormalNillableTimeNillableArray( + immutable.Some([]immutable.Option[time.Time]{immutable.Some(now)})), + expected: []NormalValue{NewNormalNillableTime(immutable.Some(now))}, + }, + { + name: "nillable array of nillable document elements", + input: NewNormalNillableDocumentNillableArray( + immutable.Some([]immutable.Option[*Document]{immutable.Some(doc1)})), + expected: []NormalValue{NewNormalNillableDocument(immutable.Some(doc1))}, + }, + } + + for _, tt := range tests { + tStr := string(tt.name) + t.Run(tStr, func(t *testing.T) { + actual, err := ToArrayOfNormalValues(tt.input) + if tt.err != "" { + require.ErrorContains(t, err, tt.err) + return + } + + assert.Equal(t, tt.expected, actual) + }) + } +} + +// This test documents a bug where array values +// were not returning the correct value for IsNillable +// and were also not convertible to a normal nil kind. +func TestArrayValue_IsNillable(t *testing.T) { + fieldKinds := []FieldKind{ + FieldKind_BOOL_ARRAY, + FieldKind_INT_ARRAY, + FieldKind_FLOAT_ARRAY, + FieldKind_STRING_ARRAY, + FieldKind_NILLABLE_BOOL_ARRAY, + FieldKind_NILLABLE_INT_ARRAY, + FieldKind_NILLABLE_FLOAT_ARRAY, + FieldKind_NILLABLE_STRING_ARRAY, + } + + for _, kind := range fieldKinds { + assert.True(t, kind.IsNillable()) + + v, err := NewNormalNil(kind) + require.NoError(t, err) + + assert.True(t, v.IsNil()) + } +} 
diff --git a/client/normal_void.go b/client/normal_void.go new file mode 100644 index 0000000000..e3e29b5094 --- /dev/null +++ b/client/normal_void.go @@ -0,0 +1,205 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package client + +import ( + "time" + + "github.com/sourcenetwork/immutable" +) + +// NormalVoid is a default implementation of NormalValue to be embedded in other types. +// It can also be used to implement the Null Object pattern: https://en.wikipedia.org/wiki/Null_object_pattern. 
+type NormalVoid struct{} + +func (NormalVoid) Unwrap() any { + return nil +} + +func (NormalVoid) IsNil() bool { + return false +} + +func (NormalVoid) IsNillable() bool { + return false +} + +func (NormalVoid) Bool() (bool, bool) { + return false, false +} + +func (NormalVoid) Int() (int64, bool) { + return 0, false +} + +func (NormalVoid) Float() (float64, bool) { + return 0, false +} + +func (NormalVoid) String() (string, bool) { + return "", false +} + +func (NormalVoid) Bytes() ([]byte, bool) { + return nil, false +} + +func (NormalVoid) Time() (time.Time, bool) { + return time.Time{}, false +} + +func (NormalVoid) Document() (*Document, bool) { + return nil, false +} + +func (NormalVoid) NillableBool() (immutable.Option[bool], bool) { + return immutable.None[bool](), false +} + +func (NormalVoid) NillableInt() (immutable.Option[int64], bool) { + return immutable.None[int64](), false +} + +func (NormalVoid) NillableFloat() (immutable.Option[float64], bool) { + return immutable.None[float64](), false +} + +func (NormalVoid) NillableString() (immutable.Option[string], bool) { + return immutable.None[string](), false +} + +func (NormalVoid) NillableBytes() (immutable.Option[[]byte], bool) { + return immutable.None[[]byte](), false +} + +func (NormalVoid) NillableTime() (immutable.Option[time.Time], bool) { + return immutable.None[time.Time](), false +} + +func (NormalVoid) NillableDocument() (immutable.Option[*Document], bool) { + return immutable.None[*Document](), false +} + +func (NormalVoid) IsArray() bool { + return false +} + +func (NormalVoid) BoolArray() ([]bool, bool) { + return nil, false +} + +func (NormalVoid) IntArray() ([]int64, bool) { + return nil, false +} + +func (NormalVoid) FloatArray() ([]float64, bool) { + return nil, false +} + +func (NormalVoid) StringArray() ([]string, bool) { + return nil, false +} + +func (NormalVoid) BytesArray() ([][]byte, bool) { + return nil, false +} + +func (NormalVoid) TimeArray() ([]time.Time, bool) { + return nil, false +} + +func (NormalVoid) DocumentArray() ([]*Document, bool) { + return nil, false +} + +func (NormalVoid) NillableBoolArray() ([]immutable.Option[bool], bool) { + return nil, false +} + +func (NormalVoid) NillableIntArray() ([]immutable.Option[int64], bool) { + return nil, false +} + +func (NormalVoid) NillableFloatArray() ([]immutable.Option[float64], bool) { + return nil, false +} + +func (NormalVoid) NillableStringArray() ([]immutable.Option[string], bool) { + return nil, false +} + +func (NormalVoid) NillableBytesArray() ([]immutable.Option[[]byte], bool) { + return nil, false +} + +func (NormalVoid) NillableTimeArray() ([]immutable.Option[time.Time], bool) { + return nil, false +} + +func (NormalVoid) NillableDocumentArray() ([]immutable.Option[*Document], bool) { + return nil, false +} + +func (NormalVoid) BoolNillableArray() (immutable.Option[[]bool], bool) { + return immutable.None[[]bool](), false +} + +func (NormalVoid) IntNillableArray() (immutable.Option[[]int64], bool) { + return immutable.None[[]int64](), false +} + +func (NormalVoid) FloatNillableArray() (immutable.Option[[]float64], bool) { + return immutable.None[[]float64](), false +} + +func (NormalVoid) StringNillableArray() (immutable.Option[[]string], bool) { + return immutable.None[[]string](), false +} + +func (NormalVoid) BytesNillableArray() (immutable.Option[[][]byte], bool) { + return immutable.None[[][]byte](), false +} + +func (NormalVoid) TimeNillableArray() (immutable.Option[[]time.Time], bool) { + return immutable.None[[]time.Time](), 
false +} + +func (NormalVoid) DocumentNillableArray() (immutable.Option[[]*Document], bool) { + return immutable.None[[]*Document](), false +} + +func (NormalVoid) NillableBoolNillableArray() (immutable.Option[[]immutable.Option[bool]], bool) { + return immutable.None[[]immutable.Option[bool]](), false +} + +func (NormalVoid) NillableIntNillableArray() (immutable.Option[[]immutable.Option[int64]], bool) { + return immutable.None[[]immutable.Option[int64]](), false +} + +func (NormalVoid) NillableFloatNillableArray() (immutable.Option[[]immutable.Option[float64]], bool) { + return immutable.None[[]immutable.Option[float64]](), false +} + +func (NormalVoid) NillableStringNillableArray() (immutable.Option[[]immutable.Option[string]], bool) { + return immutable.None[[]immutable.Option[string]](), false +} + +func (NormalVoid) NillableBytesNillableArray() (immutable.Option[[]immutable.Option[[]byte]], bool) { + return immutable.None[[]immutable.Option[[]byte]](), false +} + +func (NormalVoid) NillableTimeNillableArray() (immutable.Option[[]immutable.Option[time.Time]], bool) { + return immutable.None[[]immutable.Option[time.Time]](), false +} + +func (NormalVoid) NillableDocumentNillableArray() (immutable.Option[[]immutable.Option[*Document]], bool) { + return immutable.None[[]immutable.Option[*Document]](), false +} 
diff --git a/client/policy.go b/client/policy.go new file mode 100644 index 0000000000..5b877696c2 --- /dev/null +++ b/client/policy.go @@ -0,0 +1,31 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package client + +// PolicyDescription describes a policy which is made up of a valid policyID that is +// registered with acp and has a valid DPI-compliant resource name that also +// exists on that policy; the description is already validated. +type PolicyDescription struct { + // ID is the local policyID when using local acp, and global policyID when + // using remote acp with sourcehub. This identifier is externally managed + // by the acp system. + ID string + + // ResourceName is the name of the corresponding resource within the policy. + ResourceName string +} + +// AddPolicyResult wraps the result of successfully adding/registering a Policy. +type AddPolicyResult struct { + // PolicyID is the unique identifier returned by the acp system, + // upon successful creation of a policy. + PolicyID string +} 
diff --git a/client/request/aggregate.go b/client/request/aggregate.go index 902134b258..fa7188977e 100644 --- a/client/request/aggregate.go +++ b/client/request/aggregate.go @@ -10,20 +10,43 @@ package request -import immutables "github.com/sourcenetwork/immutable" +import "github.com/sourcenetwork/immutable" +// Aggregate represents an aggregate operation upon a set of child properties. +// +// Which aggregate this represents (e.g. _count, _avg, etc.) is determined by its +// [Name] property. type Aggregate struct { Field + // Targets hosts the properties to aggregate. + // + // When multiple properties are selected, their values will be gathered into a single set + upon which the aggregate will be performed. For example, if this aggregate represents + an average of the Friends.Age and Parents.Age fields, the result will be the average + age of all their friends and parents; it will not be an average of their average ages. Targets []*AggregateTarget } +// AggregateTarget represents the target of an [Aggregate]. type AggregateTarget struct { - HostName string - ChildName immutables.Option[string] + Limitable + Offsetable + Orderable + Filterable + + // HostName is the name of the immediate field on the object hosting the aggregate. + // + // For example if averaging Friends.Age on the User collection, this property would be + // "Friends". HostName string - Limit immutables.Option[uint64] - Offset immutables.Option[uint64] - OrderBy immutables.Option[OrderBy] - Filter immutables.Option[Filter] + // ChildName is the name of the child field on the object navigated to via [HostName]. + // + // It is optional; for example when counting the number of Friends on User, or when aggregating + // scalar arrays, this value will be None. + // + // When averaging Friends.Age on the User collection, this property would be + // "Age". + ChildName immutable.Option[string] }
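As a minimal sketch of the new shape (the _avg name and the Friends/Parents/Age fields simply mirror the example in the comment above; nothing below is mandated by this change), an aggregate averaging two relations could be built like so:

package main

import (
	"github.com/sourcenetwork/defradb/client/request"
	"github.com/sourcenetwork/immutable"
)

// buildAvgAggregate gathers Friends.Age and Parents.Age into a single set
// and averages it, per the Targets documentation above.
func buildAvgAggregate() *request.Aggregate {
	return &request.Aggregate{
		Field: request.Field{Name: "_avg"},
		Targets: []*request.AggregateTarget{
			{HostName: "Friends", ChildName: immutable.Some("Age")},
			{HostName: "Parents", ChildName: immutable.Some("Age")},
		},
	}
}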
diff --git a/client/request/cid.go b/client/request/cid.go new file mode 100644 index 0000000000..42707d0247 --- /dev/null +++ b/client/request/cid.go @@ -0,0 +1,25 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package request + +import "github.com/sourcenetwork/immutable" + +// CIDFilter is an embeddable struct that hosts a consistent set of properties +// for filtering an aspect of a request by commit CID. +type CIDFilter struct { + // CID is an optional value that selects a single document at the given commit CID + // for processing by the request. + // + // If a commit matching the given CID is not found, an error will be returned. The commit + // does not need to be the latest, and this property allows viewing of the document at + // prior revisions. + CID immutable.Option[string] +} 
diff --git a/client/request/commit.go b/client/request/commit.go index ff65e20822..e44dabf794 100644 --- a/client/request/commit.go +++ b/client/request/commit.go @@ -16,20 +16,34 @@ var ( _ Selection = (*CommitSelect)(nil) ) +// CommitSelect represents a selection of database commits made to Defra documents. type CommitSelect struct { Field + ChildSelect - DocID immutable.Option[string] - FieldID immutable.Option[string] - Cid immutable.Option[string] - Depth immutable.Option[uint64] + CIDFilter + + Limitable + Offsetable + Orderable + Groupable - Limit immutable.Option[uint64] - Offset immutable.Option[uint64] - OrderBy immutable.Option[OrderBy] - GroupBy immutable.Option[GroupBy] + // DocID is an optional filter which, when provided, will limit commits to those + // belonging to the given document. + DocID immutable.Option[string] + + // FieldID is an optional filter which, when provided, will limit commits to those + // belonging to the given field. + // + // `C` may be provided for document-level (composite) commits. + FieldID immutable.Option[string] - Fields []Selection + // Depth limits the returned commits to those no more than X places in the history + // away from the most current. + // + // For example, if a document has been updated 5 times and a depth of 2 is provided, + // only commits for the last two updates will be returned. + Depth immutable.Option[uint64] } func (c CommitSelect) ToSelect() *Select { @@ -38,11 +52,10 @@ func (c CommitSelect) ToSelect() *Select { Name: c.Name, Alias: c.Alias, }, - Limit: c.Limit, - Offset: c.Offset, - OrderBy: c.OrderBy, - GroupBy: c.GroupBy, - Fields: c.Fields, - Root: CommitSelection, + Limitable: c.Limitable, + Offsetable: c.Offsetable, + Orderable: c.Orderable, + Groupable: c.Groupable, + ChildSelect: c.ChildSelect, } }
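A rough sketch of how the reworked CommitSelect might be assembled for the depth case described above (the "commits" name and docID value are illustrative):

package main

import (
	"github.com/sourcenetwork/defradb/client/request"
	"github.com/sourcenetwork/immutable"
)

// commitsForDoc selects the commits belonging to one document, no more than
// `depth` updates away from the head, using the new Depth argument.
func commitsForDoc(docID string, depth uint64) *request.CommitSelect {
	return &request.CommitSelect{
		Field: request.Field{Name: "commits"},
		DocID: immutable.Some(docID),
		Depth: immutable.Some(depth),
	}
}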
diff --git a/client/request/doc_ids.go b/client/request/doc_ids.go new file mode 100644 index 0000000000..24089d2032 --- /dev/null +++ b/client/request/doc_ids.go @@ -0,0 +1,21 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package request + +import "github.com/sourcenetwork/immutable" + +// DocIDsFilter is an embeddable struct that hosts a consistent set of properties +// for filtering an aspect of a request by document IDs. +type DocIDsFilter struct { + // DocIDs is an optional value that ensures any records processed by the request + // will have one of the given document IDs. + DocIDs immutable.Option[[]string] +} 
diff --git a/client/request/field.go b/client/request/field.go index 578074671b..636a0d97e8 100644 --- a/client/request/field.go +++ b/client/request/field.go @@ -14,6 +14,12 @@ import "github.com/sourcenetwork/immutable" // Field implements Selection type Field struct { - Name string + // Name contains the name of the field on its host object. + // + // For example `email` on a `User` collection, or a `_count` aggregate. + Name string + + // Alias is an optional override for Name; if provided, results will be returned + // from the query using the Alias instead of the Name. Alias immutable.Option[string] } 
diff --git a/client/request/filter.go b/client/request/filter.go index 67a80b58e7..aabfafb9b9 100644 --- a/client/request/filter.go +++ b/client/request/filter.go @@ -10,6 +10,8 @@ package request +import "github.com/sourcenetwork/immutable" + const ( FilterOpOr = "_or" FilterOpAnd = "_and" @@ -24,3 +26,11 @@ type Filter struct { // parsed filter conditions Conditions map[string]any } + +// Filterable is an embeddable struct that hosts a consistent set of properties +// for filtering an aspect of a request. +type Filterable struct { + // Filter is an optional set of conditions used to filter records prior to + // being processed by the request. + Filter immutable.Option[Filter] +} 
diff --git a/client/request/group.go b/client/request/group.go index e2fd977a00..b38186cb3a 100644 --- a/client/request/group.go +++ b/client/request/group.go @@ -10,6 +10,22 @@ package request +import "github.com/sourcenetwork/immutable" + type GroupBy struct { Fields []string } + +// Groupable is an embeddable struct that hosts a consistent set of properties +// for grouping an aspect of a request. +type Groupable struct { + // GroupBy is an optional set of fields by which to group the contents of this + // request. + // + // If this argument is provided, only fields used to group may be rendered in + the immediate child selector. Additional fields may be selected by using + the '_group' selector within the immediate child selector. If an empty set + is provided, the restrictions mentioned still apply, although all results + will appear within the same group. + GroupBy immutable.Option[GroupBy] +}
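Since each argument now lives in one embeddable struct, request types compose them instead of redeclaring fields. A sketch, with illustrative condition keys (the filter condition syntax itself is not defined by this change):

package main

import (
	"github.com/sourcenetwork/defradb/client/request"
	"github.com/sourcenetwork/immutable"
)

// adultsByName filters a collection and groups the results, setting the
// embedded Filterable and Groupable structs by their type names.
func adultsByName(collection string) *request.Select {
	return &request.Select{
		Field: request.Field{Name: collection},
		Filterable: request.Filterable{
			Filter: immutable.Some(request.Filter{
				Conditions: map[string]any{"age": map[string]any{"_gt": 21}},
			}),
		},
		Groupable: request.Groupable{
			GroupBy: immutable.Some(request.GroupBy{Fields: []string{"name"}}),
		},
	}
}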
diff --git a/client/request/limit.go b/client/request/limit.go new file mode 100644 index 0000000000..2e1b1a4ab7 --- /dev/null +++ b/client/request/limit.go @@ -0,0 +1,20 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package request + +import "github.com/sourcenetwork/immutable" + +// Limitable is an embeddable struct that hosts a consistent set of properties +// for limiting an aspect of a request. +type Limitable struct { + // Limit is an optional value that caps the number of results to the number provided. + Limit immutable.Option[uint64] +} 
diff --git a/client/request/mutation.go b/client/request/mutation.go index 6bff180dd9..81fcc823c9 100644 --- a/client/request/mutation.go +++ b/client/request/mutation.go @@ -10,8 +10,6 @@ package request -import "github.com/sourcenetwork/immutable" - type MutationType int const ( @@ -25,17 +23,24 @@ const ( // all the possible arguments. type ObjectMutation struct { Field + ChildSelect + + Filterable + DocIDsFilter + + // Type is the type of mutation that this object represents. + // + // For example [CreateObjects]. Type MutationType - // Collection is the target collection name - // if this mutation is on an object. + // Collection is the target collection name. Collection string - IDs immutable.Option[[]string] - Filter immutable.Option[Filter] - Input map[string]any - - Fields []Selection + // Input is the JSON representation of the fieldName-value pairs of document properties + // to mutate. + // + // This is ignored for [DeleteObjects] mutations. + Input map[string]any } // ToSelect returns a basic Select object, with the same Name, Alias, and Fields as @@ -46,8 +51,8 @@ func (m ObjectMutation) ToSelect() *Select { Name: m.Collection, Alias: m.Alias, }, - Fields: m.Fields, - DocIDs: m.IDs, - Filter: m.Filter, + ChildSelect: m.ChildSelect, + DocIDsFilter: m.DocIDsFilter, + Filterable: m.Filterable, } } 
diff --git a/client/request/offset.go b/client/request/offset.go new file mode 100644 index 0000000000..5bb2ea723d --- /dev/null +++ b/client/request/offset.go @@ -0,0 +1,22 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package request + +import "github.com/sourcenetwork/immutable" + +// Offsetable is an embeddable struct that hosts a consistent set of properties +// for offsetting an aspect of a request. +type Offsetable struct { + // Offset is an optional value that skips the given number of results that would have + // otherwise been returned. Commonly used alongside the limit argument, + // this argument will still work on its own. 
+ Offset immutable.Option[uint64] +} diff --git a/client/request/order.go b/client/request/order.go index 1fff3953f1..d998843959 100644 --- a/client/request/order.go +++ b/client/request/order.go @@ -10,6 +10,8 @@ package request +import "github.com/sourcenetwork/immutable" + type ( OrderDirection string @@ -29,3 +31,11 @@ type ( Conditions []OrderCondition } ) + +// Orderable is an embeddable struct that hosts a consistent set of properties +// for ordering an aspect of a request. +type Orderable struct { + // OrderBy is an optional set of field-orders which may be used to sort the results. An + // empty set will be ignored. + OrderBy immutable.Option[OrderBy] +} diff --git a/client/request/select.go b/client/request/select.go index 863bba2aeb..0365fb385b 100644 --- a/client/request/select.go +++ b/client/request/select.go @@ -12,16 +12,6 @@ package request import ( "encoding/json" - - "github.com/sourcenetwork/immutable" -) - -// SelectionType is the type of selection. -type SelectionType int - -const ( - ObjectSelection SelectionType = iota - CommitSelection ) // Select is a complex Field with strong typing. @@ -29,22 +19,29 @@ const ( // Includes fields, and request arguments like filters, limits, etc. type Select struct { Field + ChildSelect + + Limitable + Offsetable + Orderable + Filterable + DocIDsFilter + CIDFilter + Groupable + + // ShowDeleted will return deleted documents along with non-deleted ones + // if set to true. + ShowDeleted bool +} - DocIDs immutable.Option[[]string] - CID immutable.Option[string] - - // Root is the top level type of parsed request - Root SelectionType - - Limit immutable.Option[uint64] - Offset immutable.Option[uint64] - OrderBy immutable.Option[OrderBy] - GroupBy immutable.Option[GroupBy] - Filter immutable.Option[Filter] - +// ChildSelect represents a type with selectable child properties. +// +// At least one child must be selected. +type ChildSelect struct { + // Fields contains the set of child properties to return. + // + // At least one child property must be selected. Fields []Selection - - ShowDeleted bool } // Validate validates the Select. @@ -111,25 +108,20 @@ func (s *Select) validateGroupBy() []error { } // selectJson is a private object used for handling json deserialization -// of `Select` objects. +// of [Select] objects. +// +// It contains everything minus the [ChildSelect], which uses a custom UnmarshalJSON +// and is skipped over when embedding due to the way the std lib json pkg works. type selectJson struct { Field - DocIDs immutable.Option[[]string] - CID immutable.Option[string] - Root SelectionType - Limit immutable.Option[uint64] - Offset immutable.Option[uint64] - OrderBy immutable.Option[OrderBy] - GroupBy immutable.Option[GroupBy] - Filter immutable.Option[Filter] + Limitable + Offsetable + Orderable + Filterable + DocIDsFilter + CIDFilter + Groupable ShowDeleted bool - - // Properties above this line match the `Select` object and - // are deserialized using the normal/default logic. - // Properties below this line require custom logic in `UnmarshalJSON` - // in order to be deserialized correctly. 
- - Fields []map[string]json.RawMessage } func (s *Select) UnmarshalJSON(bytes []byte) error { @@ -142,13 +134,37 @@ func (s *Select) UnmarshalJSON(bytes []byte) error { s.Field = selectMap.Field s.DocIDs = selectMap.DocIDs s.CID = selectMap.CID - s.Root = selectMap.Root - s.Limit = selectMap.Limit - s.Offset = selectMap.Offset - s.OrderBy = selectMap.OrderBy - s.GroupBy = selectMap.GroupBy - s.Filter = selectMap.Filter + s.Limitable = selectMap.Limitable + s.Offsetable = selectMap.Offsetable + s.Orderable = selectMap.Orderable + s.Groupable = selectMap.Groupable + s.Filterable = selectMap.Filterable s.ShowDeleted = selectMap.ShowDeleted + + var childSelect ChildSelect + err = json.Unmarshal(bytes, &childSelect) + if err != nil { + return err + } + + s.ChildSelect = childSelect + + return nil +} + +// childSelectJson is a private object used for handling json deserialization +// of [ChildSelect] objects. +type childSelectJson struct { + Fields []map[string]json.RawMessage +} + +func (s *ChildSelect) UnmarshalJSON(bytes []byte) error { + var selectMap childSelectJson + err := json.Unmarshal(bytes, &selectMap) + if err != nil { + return err + } + s.Fields = make([]Selection, len(selectMap.Fields)) for i, field := range selectMap.Fields { @@ -163,8 +179,8 @@ func (s *Select) UnmarshalJSON(bytes []byte) error { // They must be non-nillable as nil values may have their keys omitted from // the json. This also relies on the fields being unique. We may wish to change // this later to custom-serialize with a `_type` property. - if _, ok := field["Root"]; ok { - // This must be a Select, as only the `Select` type has a `Root` field + if _, ok := field["Fields"]; ok { + // This must be a Select, as only the `Select` type has a `Fields` field var fieldSelect Select err := json.Unmarshal(fieldJson, &fieldSelect) if err != nil { diff --git a/client/request/subscription.go b/client/request/subscription.go index bb4e01156c..08276e7ef7 100644 --- a/client/request/subscription.go +++ b/client/request/subscription.go @@ -19,13 +19,12 @@ import ( // arguments type ObjectSubscription struct { Field + ChildSelect + + Filterable // Collection is the target collection name Collection string - - Filter immutable.Option[Filter] - - Fields []Selection } // ToSelect returns a basic Select object, with the same Name, Alias, and Fields as @@ -36,9 +35,13 @@ func (m ObjectSubscription) ToSelect(docID, cid string) *Select { Name: m.Collection, Alias: m.Alias, }, - DocIDs: immutable.Some([]string{docID}), - CID: immutable.Some(cid), - Fields: m.Fields, - Filter: m.Filter, + DocIDsFilter: DocIDsFilter{ + DocIDs: immutable.Some([]string{docID}), + }, + CIDFilter: CIDFilter{ + immutable.Some(cid), + }, + ChildSelect: m.ChildSelect, + Filterable: m.Filterable, } } diff --git a/client/schema_description.go b/client/schema_description.go new file mode 100644 index 0000000000..2d34b131b8 --- /dev/null +++ b/client/schema_description.go @@ -0,0 +1,56 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package client + +// SchemaDescription describes a Schema and its associated metadata. +type SchemaDescription struct { + // Root is the version agnostic identifier for this schema. 
+ // + // It remains constant throughout the lifetime of this schema. + Root string + + // VersionID is the version-specific identifier for this schema. + // + // It is generated on mutation of this schema and can be used to uniquely + // identify a schema at a specific version. + VersionID string + + // Name is the name of this Schema. + // + // It is currently used to define the Collection Name, and as such these two properties + // will currently share the same name. + // + // It is immutable. + Name string + + // Fields contains the fields globally defined across the node network within this Schema. + // + // Any [CollectionDescription]s that reference this [SchemaDescription] will have a field + // set that contains all of these fields, plus any local only fields (such as the secondary side + // of a relation). + // + // Embedded objects (including within Views) are schema-only, and as such fields of embedded + // objects will not have a corresponding [CollectionFieldDescription]. + // + // Currently new fields may be added after initial declaration, but they cannot be removed. + Fields []SchemaFieldDescription +} + +// GetFieldByName returns the field for the given field name. If such a field is found it +// will return it and true; if it is not found it will return false. +func (s SchemaDescription) GetFieldByName(fieldName string) (SchemaFieldDescription, bool) { + for _, field := range s.Fields { + if field.Name == fieldName { + return field, true + } + } + return SchemaFieldDescription{}, false +}
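A minimal sketch of the comma-ok pattern this accessor enables (the helper name is hypothetical):

package main

import (
	"fmt"

	"github.com/sourcenetwork/defradb/client"
)

// kindOf resolves the kind of a named field, using the boolean result of
// GetFieldByName instead of comparing against a zero value.
func kindOf(schema client.SchemaDescription, name string) (string, error) {
	field, ok := schema.GetFieldByName(name)
	if !ok {
		return "", fmt.Errorf("field %q not found on schema %q", name, schema.Name)
	}
	return field.Kind.String(), nil
}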
diff --git a/client/schema_field_description.go b/client/schema_field_description.go new file mode 100644 index 0000000000..87ee843ec8 --- /dev/null +++ b/client/schema_field_description.go @@ -0,0 +1,343 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package client + +import ( + "encoding/json" + "strconv" + "strings" +) + +// FieldKind describes the type of a field. +type FieldKind interface { + // String returns the string representation of this FieldKind. + String() string + + // Underlying returns the underlying Kind as a string. + // + // If this is an array, it will return the element kind, else it will return the same as + // [String()]. + Underlying() string + + // IsNillable returns true if this kind supports nil values. + IsNillable() bool + + // IsObject returns true if this FieldKind is an object type, or an array of object types. + IsObject() bool + + // IsObjectArray returns true if this FieldKind is an object array type. + IsObjectArray() bool + + // IsArray returns true if this FieldKind is an array type which includes inline arrays as well + // as relation arrays. + IsArray() bool +} + +// SchemaFieldDescription describes a field on a Schema and its associated metadata. +type SchemaFieldDescription struct { + // Name contains the name of this field. + // + // It is currently immutable. + Name string + + // The data type that this field holds. + // + // Must contain a valid value. It is currently immutable. + Kind FieldKind + + // The CRDT Type of this field. If no type has been provided it will default to [LWW_REGISTER]. + // + // It is currently immutable. + Typ CType +} + +// ScalarKind represents singular scalar field kinds, such as `Int`. +type ScalarKind uint8 + +// ScalarArrayKind represents arrays of simple scalar field kinds, such as `[Int]`. +type ScalarArrayKind uint8 + +// ObjectKind represents singular objects (foreign and embedded), such as `User`. +type ObjectKind string + +// ObjectArrayKind represents arrays of objects (foreign and embedded), such as `[User]`. +type ObjectArrayKind string + +var _ FieldKind = ScalarKind(0) +var _ FieldKind = ScalarArrayKind(0) +var _ FieldKind = ObjectKind("") +var _ FieldKind = ObjectArrayKind("") + +func (k ScalarKind) String() string { + switch k { + case FieldKind_DocID: + return "ID" + case FieldKind_NILLABLE_BOOL: + return "Boolean" + case FieldKind_NILLABLE_INT: + return "Int" + case FieldKind_NILLABLE_DATETIME: + return "DateTime" + case FieldKind_NILLABLE_FLOAT: + return "Float" + case FieldKind_NILLABLE_STRING: + return "String" + case FieldKind_NILLABLE_BLOB: + return "Blob" + case FieldKind_NILLABLE_JSON: + return "JSON" + default: + return strconv.Itoa(int(k)) + } +} + +func (k ScalarKind) Underlying() string { + return k.String() +} + +func (k ScalarKind) IsNillable() bool { + return k != FieldKind_DocID +} + +func (k ScalarKind) IsObject() bool { + return false +} + +func (k ScalarKind) IsObjectArray() bool { + return false +} + +func (k ScalarKind) IsArray() bool { + return false +} + +func (k ScalarArrayKind) String() string { + switch k { + case FieldKind_NILLABLE_BOOL_ARRAY: + return "[Boolean]" + case FieldKind_BOOL_ARRAY: + return "[Boolean!]" + case FieldKind_NILLABLE_INT_ARRAY: + return "[Int]" + case FieldKind_INT_ARRAY: + return "[Int!]" + case FieldKind_NILLABLE_FLOAT_ARRAY: + return "[Float]" + case FieldKind_FLOAT_ARRAY: + return "[Float!]" + case FieldKind_NILLABLE_STRING_ARRAY: + return "[String]" + case FieldKind_STRING_ARRAY: + return "[String!]" + default: + return strconv.Itoa(int(k)) + } +} + +func (k ScalarArrayKind) Underlying() string { + return strings.Trim(k.String(), "[]") +} + +func (k ScalarArrayKind) IsNillable() bool { + return true +} + +func (k ScalarArrayKind) IsObject() bool { + return false +} + +func (k ScalarArrayKind) IsObjectArray() bool { + return false +} + +func (k ScalarArrayKind) IsArray() bool { + return true +} + +func (k ObjectKind) String() string { + return string(k) +} + +func (k ObjectKind) Underlying() string { + return k.String() +} + +func (k ObjectKind) IsNillable() bool { + return true +} + +func (k ObjectKind) IsObject() bool { + return true +} + +func (k ObjectKind) IsObjectArray() bool { + return false +} + +func (k ObjectKind) IsArray() bool { + return false +} + +func (k ObjectArrayKind) String() string { + return "[" + string(k) + "]" +} + +func (k ObjectArrayKind) Underlying() string { + return strings.Trim(k.String(), "[]") +} + +func (k ObjectArrayKind) IsNillable() bool { + return true +} + +func (k ObjectArrayKind) IsObject() bool { + return true +} + +func (k ObjectArrayKind) IsObjectArray() bool { + return true +} + +func (k ObjectArrayKind) IsArray() bool { + return true +} + +func (k ObjectArrayKind) MarshalJSON() ([]byte, error) { + return []byte(`"` + k.String() + `"`), nil +} + +// Note: These values are serialized and persisted in the database; avoid modifying existing values. +const ( + FieldKind_None ScalarKind = 0 + FieldKind_DocID ScalarKind = 1 + FieldKind_NILLABLE_BOOL ScalarKind = 2 + FieldKind_BOOL_ARRAY ScalarArrayKind = 3 + FieldKind_NILLABLE_INT ScalarKind = 4 + FieldKind_INT_ARRAY ScalarArrayKind = 5 + FieldKind_NILLABLE_FLOAT ScalarKind = 6 + FieldKind_FLOAT_ARRAY ScalarArrayKind = 7 + _ ScalarKind = 8 // safe to repurpose (was never used) + _ ScalarKind = 9 // safe to repurpose (previously old field) + FieldKind_NILLABLE_DATETIME ScalarKind = 10 + FieldKind_NILLABLE_STRING ScalarKind = 11 + FieldKind_STRING_ARRAY ScalarArrayKind = 12 + FieldKind_NILLABLE_BLOB ScalarKind = 13 + FieldKind_NILLABLE_JSON ScalarKind = 14 + _ ScalarKind = 15 // safe to repurpose (was never used) + _ ScalarKind = 16 // Deprecated 2024-03-15, was FieldKind_FOREIGN_OBJECT + _ ScalarKind = 17 // Deprecated 2024-03-15, was FieldKind_FOREIGN_OBJECT_ARRAY + FieldKind_NILLABLE_BOOL_ARRAY ScalarArrayKind = 18 + FieldKind_NILLABLE_INT_ARRAY ScalarArrayKind = 19 + FieldKind_NILLABLE_FLOAT_ARRAY ScalarArrayKind = 20 + FieldKind_NILLABLE_STRING_ARRAY ScalarArrayKind = 21 +)
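The kind methods defined above behave as follows for an array kind; a small sketch (expected output noted in comments):

package main

import (
	"fmt"

	"github.com/sourcenetwork/defradb/client"
)

func main() {
	k := client.FieldKind_STRING_ARRAY
	fmt.Println(k.String())     // [String!]
	fmt.Println(k.Underlying()) // String! (array brackets stripped)
	fmt.Println(k.IsArray())    // true
	fmt.Println(k.IsNillable()) // true
}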
+ +// FieldKindStringToEnumMapping maps string representations of [FieldKind] values to +// their enum values. +// +// It is currently used by [db.PatchSchema] to allow string representations of +// [FieldKind] to be provided instead of their raw int values. This usage may expand +// in the future. They currently roughly correspond to the GQL field types, but this +// equality is not guaranteed. +var FieldKindStringToEnumMapping = map[string]FieldKind{ + "ID": FieldKind_DocID, + "Boolean": FieldKind_NILLABLE_BOOL, + "[Boolean]": FieldKind_NILLABLE_BOOL_ARRAY, + "[Boolean!]": FieldKind_BOOL_ARRAY, + "Int": FieldKind_NILLABLE_INT, + "[Int]": FieldKind_NILLABLE_INT_ARRAY, + "[Int!]": FieldKind_INT_ARRAY, + "DateTime": FieldKind_NILLABLE_DATETIME, + "Float": FieldKind_NILLABLE_FLOAT, + "[Float]": FieldKind_NILLABLE_FLOAT_ARRAY, + "[Float!]": FieldKind_FLOAT_ARRAY, + "String": FieldKind_NILLABLE_STRING, + "[String]": FieldKind_NILLABLE_STRING_ARRAY, + "[String!]": FieldKind_STRING_ARRAY, + "Blob": FieldKind_NILLABLE_BLOB, + "JSON": FieldKind_NILLABLE_JSON, +} + +// IsRelation returns true if this field is a relation. +func (f SchemaFieldDescription) IsRelation() bool { + return f.Kind.IsObject() +} + +// schemaFieldDescription is a private type used to facilitate the unmarshalling +// of json to a [SchemaFieldDescription]. +type schemaFieldDescription struct { + Name string + Typ CType + + // Properties below this line are unmarshalled using custom logic in [UnmarshalJSON] + Kind json.RawMessage +} + +func (f *SchemaFieldDescription) UnmarshalJSON(bytes []byte) error { + var descMap schemaFieldDescription + err := json.Unmarshal(bytes, &descMap) + if err != nil { + return err + } + + f.Name = descMap.Name + f.Typ = descMap.Typ + f.Kind, err = parseFieldKind(descMap.Kind) + if err != nil { + return err + } + + return nil +}
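A sketch of how the custom unmarshaller resolves kinds (the JSON keys follow the exported field names above; error handling is elided):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/sourcenetwork/defradb/client"
)

func main() {
	// Known string kinds resolve through FieldKindStringToEnumMapping.
	var tags client.SchemaFieldDescription
	_ = json.Unmarshal([]byte(`{"Name": "tags", "Kind": "[String!]"}`), &tags)
	fmt.Println(tags.Kind == client.FieldKind_STRING_ARRAY) // true

	// Unknown strings fall back to object kinds, as parseFieldKind shows below.
	var author client.SchemaFieldDescription
	_ = json.Unmarshal([]byte(`{"Name": "author", "Kind": "User"}`), &author)
	fmt.Println(author.Kind.IsObject()) // true
}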
+ +func parseFieldKind(bytes json.RawMessage) (FieldKind, error) { + if len(bytes) == 0 { + return FieldKind_None, nil + } + + if bytes[0] != '"' { + // If the Kind is not represented by a string, try to parse it to an int, as + // that is the only other type we support. + var intKind uint8 + err := json.Unmarshal(bytes, &intKind) + if err != nil { + return nil, err + } + switch intKind { + case uint8(FieldKind_BOOL_ARRAY), uint8(FieldKind_INT_ARRAY), uint8(FieldKind_FLOAT_ARRAY), + uint8(FieldKind_STRING_ARRAY), uint8(FieldKind_NILLABLE_BOOL_ARRAY), uint8(FieldKind_NILLABLE_INT_ARRAY), + uint8(FieldKind_NILLABLE_FLOAT_ARRAY), uint8(FieldKind_NILLABLE_STRING_ARRAY): + return ScalarArrayKind(intKind), nil + default: + return ScalarKind(intKind), nil + } + } + + var strKind string + err := json.Unmarshal(bytes, &strKind) + if err != nil { + return nil, err + } + + kind, ok := FieldKindStringToEnumMapping[strKind] + if ok { + return kind, nil + } + + // If we don't find the string representation of this type in the + // scalar mapping, assume it is an object - if it is not, validation + // will catch this later. If it is unknown we have no way of telling + // whether the user thought it was a scalar or an object anyway. + if strKind[0] == '[' { + return ObjectArrayKind(strings.Trim(strKind, "[]")), nil + } + return ObjectKind(strKind), nil +} 
diff --git a/client/value.go b/client/value.go index 261535d8d2..bc84205cd9 100644 --- a/client/value.go +++ b/client/value.go @@ -17,11 +17,11 @@ import ( type FieldValue struct { t CType - value any + value NormalValue isDirty bool } -func NewFieldValue(t CType, val any) *FieldValue { +func NewFieldValue(t CType, val NormalValue) *FieldValue { return &FieldValue{ t: t, value: val, @@ -30,6 +30,10 @@ func NewFieldValue(t CType, val NormalValue) *FieldValue { } func (val FieldValue) Value() any { + return val.value.Unwrap() +} + +func (val FieldValue) NormalValue() NormalValue { return val.value } @@ -38,7 +42,7 @@ func (val FieldValue) Type() CType { } func (val FieldValue) IsDocument() bool { - _, ok := val.value.(*Document) + _, ok := val.value.Document() return ok } @@ -62,30 +66,27 @@ func (val FieldValue) Bytes() ([]byte, error) { } var value any - switch tempVal := val.value.(type) { - case []immutable.Option[string]: - value = convertImmutable(tempVal) - case []immutable.Option[int64]: - value = convertImmutable(tempVal) - case []immutable.Option[float64]: - value = convertImmutable(tempVal) - case []immutable.Option[bool]: - value = convertImmutable(tempVal) - default: - value = val.value + if v, ok := val.value.NillableStringArray(); ok { + value = convertImmutable(v) + } else if v, ok := val.value.NillableIntArray(); ok { + value = convertImmutable(v) + } else if v, ok := val.value.NillableFloatArray(); ok { + value = convertImmutable(v) + } else if v, ok := val.value.NillableBoolArray(); ok { + value = convertImmutable(v) + } else { + value = val.value.Unwrap() } return em.Marshal(value) } func convertImmutable[T any](vals []immutable.Option[T]) []any { - var out []any - for _, val := range vals { - if !val.HasValue() { - out = append(out, nil) - continue + out := make([]any, len(vals)) + for i := range vals { + if vals[i].HasValue() { + out[i] = vals[i].Value() } - out = append(out, val.Value()) } return out }
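With FieldValue now wrapping a NormalValue, typed access replaces type switches; a minimal sketch (assuming the LWW_REGISTER CType constant referenced in the comments above is exported from package client):

package main

import (
	"fmt"

	"github.com/sourcenetwork/defradb/client"
)

func main() {
	val := client.NewFieldValue(client.LWW_REGISTER, client.NewNormalInt(42))
	fmt.Println(val.Value()) // 42, unwrapped back to a plain value

	// Typed access via the NormalValue accessor, no type switch needed.
	i, ok := val.NormalValue().Int()
	fmt.Println(i, ok) // 42 true
}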
@@ -33,18 +33,18 @@ import (

 var (
 	// ensure types implements core interfaces
-	_ core.ReplicatedData = (*PNCounter[float64])(nil)
-	_ core.ReplicatedData = (*PNCounter[int64])(nil)
-	_ core.Delta = (*PNCounterDelta[float64])(nil)
-	_ core.Delta = (*PNCounterDelta[int64])(nil)
+	_ core.ReplicatedData = (*Counter[float64])(nil)
+	_ core.ReplicatedData = (*Counter[int64])(nil)
+	_ core.Delta = (*CounterDelta[float64])(nil)
+	_ core.Delta = (*CounterDelta[int64])(nil)
 )

 type Incrementable interface {
 	constraints.Integer | constraints.Float
 }

-// PNCounterDelta is a single delta operation for an PNCounter
-type PNCounterDelta[T Incrementable] struct {
+// CounterDelta is a single delta operation for a Counter
+type CounterDelta[T Incrementable] struct {
 	DocID []byte
 	FieldName string
 	Priority uint64
@@ -59,17 +59,17 @@ type PNCounterDelta[T Incrementable] struct {
 }

 // GetPriority gets the current priority for this delta.
-func (delta *PNCounterDelta[T]) GetPriority() uint64 {
+func (delta *CounterDelta[T]) GetPriority() uint64 {
 	return delta.Priority
 }

 // SetPriority will set the priority for this delta.
-func (delta *PNCounterDelta[T]) SetPriority(prio uint64) {
+func (delta *CounterDelta[T]) SetPriority(prio uint64) {
 	delta.Priority = prio
 }

 // Marshal encodes the delta using CBOR.
-func (delta *PNCounterDelta[T]) Marshal() ([]byte, error) {
+func (delta *CounterDelta[T]) Marshal() ([]byte, error) {
 	h := &codec.CborHandle{}
 	buf := bytes.NewBuffer(nil)
 	enc := codec.NewEncoder(buf, h)
@@ -81,44 +81,50 @@ func (delta *PNCounterDelta[T]) Marshal() ([]byte, error) {
 }

 // Unmarshal decodes the delta from CBOR.
-func (delta *PNCounterDelta[T]) Unmarshal(b []byte) error {
+func (delta *CounterDelta[T]) Unmarshal(b []byte) error {
 	h := &codec.CborHandle{}
 	dec := codec.NewDecoderBytes(b, h)
 	return dec.Decode(delta)
 }

-// PNCounter, is a simple CRDT type that allows increment/decrement
+// Counter is a simple CRDT type that allows increment/decrement
 // of Int and Float data types while ensuring convergence.
-type PNCounter[T Incrementable] struct {
+type Counter[T Incrementable] struct {
 	baseCRDT
+	AllowDecrement bool
 }

-// NewPNCounter returns a new instance of the PNCounter with the given ID.
-func NewPNCounter[T Incrementable](
+// NewCounter returns a new instance of the Counter with the given ID.
+func NewCounter[T Incrementable](
 	store datastore.DSReaderWriter,
 	schemaVersionKey core.CollectionSchemaVersionKey,
 	key core.DataStoreKey,
 	fieldName string,
-) PNCounter[T] {
-	return PNCounter[T]{newBaseCRDT(store, key, schemaVersionKey, fieldName)}
+	allowDecrement bool,
+) Counter[T] {
+	return Counter[T]{newBaseCRDT(store, key, schemaVersionKey, fieldName), allowDecrement}
 }

-// Value gets the current register value
-func (reg PNCounter[T]) Value(ctx context.Context) ([]byte, error) {
-	valueK := reg.key.WithValueFlag()
-	buf, err := reg.store.Get(ctx, valueK.ToDS())
+// Value gets the current counter value
+func (c Counter[T]) Value(ctx context.Context) ([]byte, error) {
+	valueK := c.key.WithValueFlag()
+	buf, err := c.store.Get(ctx, valueK.ToDS())
 	if err != nil {
 		return nil, err
 	}
 	return buf, nil
 }

-// Set generates a new delta with the supplied value
-func (reg PNCounter[T]) Increment(ctx context.Context, value T) (*PNCounterDelta[T], error) {
+// Increment generates a new delta with the supplied value.
+//
+// WARNING: Incrementing an integer and causing it to overflow the int64 max value
+// will cause the value to roll over to the int64 min value. Incrementing a float and
+// causing it to overflow the float64 max value will act like a no-op.
+func (c Counter[T]) Increment(ctx context.Context, value T) (*CounterDelta[T], error) {
 	// To ensure that the dag block is unique, we add a random number to the delta.
 	// This is done only on update (if the doc doesn't already exist) to ensure that the
 	// initial dag block of a document can be reproducible.
-	exists, err := reg.store.Has(ctx, reg.key.ToPrimaryDataStoreKey().ToDS())
+	exists, err := c.store.Has(ctx, c.key.ToPrimaryDataStoreKey().ToDS())
 	if err != nil {
 		return nil, err
 	}
@@ -131,29 +137,32 @@ func (reg PNCounter[T]) Increment(ctx context.Context, value T) (*PNCounterDelta
 		nonce = r.Int64()
 	}

-	return &PNCounterDelta[T]{
-		DocID: []byte(reg.key.DocID),
-		FieldName: reg.fieldName,
+	return &CounterDelta[T]{
+		DocID: []byte(c.key.DocID),
+		FieldName: c.fieldName,
 		Data: value,
-		SchemaVersionID: reg.schemaVersionKey.SchemaVersionId,
+		SchemaVersionID: c.schemaVersionKey.SchemaVersionId,
 		Nonce: nonce,
 	}, nil
 }

 // Merge implements ReplicatedData interface.
-// It merges two PNCounterRegisty by adding the values together.
-func (reg PNCounter[T]) Merge(ctx context.Context, delta core.Delta) error {
-	d, ok := delta.(*PNCounterDelta[T])
+// It merges two Counters by adding their values together.
+func (c Counter[T]) Merge(ctx context.Context, delta core.Delta) error {
+	d, ok := delta.(*CounterDelta[T])
 	if !ok {
 		return ErrMismatchedMergeType
 	}

-	return reg.incrementValue(ctx, d.Data, d.GetPriority())
+	return c.incrementValue(ctx, d.Data, d.GetPriority())
 }

-func (reg PNCounter[T]) incrementValue(ctx context.Context, value T, priority uint64) error {
-	key := reg.key.WithValueFlag()
-	marker, err := reg.store.Get(ctx, reg.key.ToPrimaryDataStoreKey().ToDS())
+func (c Counter[T]) incrementValue(ctx context.Context, value T, priority uint64) error {
+	if !c.AllowDecrement && value < 0 {
+		return NewErrNegativeValue(value)
+	}
+	key := c.key.WithValueFlag()
+	marker, err := c.store.Get(ctx, c.key.ToPrimaryDataStoreKey().ToDS())
 	if err != nil && !errors.Is(err, ds.ErrNotFound) {
 		return err
 	}
@@ -161,7 +170,7 @@ func (reg PNCounter[T]) incrementValue(ctx context.Context, value T, priority ui
 		key = key.WithDeletedFlag()
 	}

-	curValue, err := reg.getCurrentValue(ctx, key)
+	curValue, err := c.getCurrentValue(ctx, key)
 	if err != nil {
 		return err
 	}
@@ -172,16 +181,16 @@ func (reg PNCounter[T]) incrementValue(ctx context.Context, value T, priority ui
 		return err
 	}

-	err = reg.store.Put(ctx, key.ToDS(), b)
+	err = c.store.Put(ctx, key.ToDS(), b)
 	if err != nil {
 		return NewErrFailedToStoreValue(err)
 	}

-	return reg.setPriority(ctx, reg.key, priority)
+	return c.setPriority(ctx, c.key, priority)
 }

-func (reg PNCounter[T]) getCurrentValue(ctx context.Context, key core.DataStoreKey) (T, error) {
-	curValue, err := reg.store.Get(ctx, key.ToDS())
+func (c Counter[T]) getCurrentValue(ctx context.Context, key core.DataStoreKey) (T, error) {
+	curValue, err := c.store.Get(ctx, key.ToDS())
 	if err != nil {
 		if errors.Is(err, ds.ErrNotFound) {
 			return 0, nil
@@ -192,14 +201,14 @@ func (reg PNCounter[T]) getCurrentValue(ctx context.Context, key core.DataStoreK
 	return getNumericFromBytes[T](curValue)
 }

-// DeltaDecode is a typed helper to extract a PNCounterDelta from a ipld.Node
-func (reg PNCounter[T]) DeltaDecode(node ipld.Node) (core.Delta, error) {
+// DeltaDecode is a typed helper to extract a CounterDelta from an ipld.Node
+func (c Counter[T]) DeltaDecode(node ipld.Node) (core.Delta, error) {
 	pbNode, ok := node.(*dag.ProtoNode)
 	if !ok {
 		return nil, client.NewErrUnexpectedType[*dag.ProtoNode]("ipld.Node", node)
 	}
-	delta := &PNCounterDelta[T]{}
+	delta := &CounterDelta[T]{}
 	err := delta.Unmarshal(pbNode.Data())
 	if err != nil {
 		return nil, err
@@ -208,6 +217,13 @@ func (reg PNCounter[T]) DeltaDecode(node ipld.Node) (core.Delta, error) {
 	return delta, nil
 }

+func (c Counter[T]) CType() client.CType {
+	if c.AllowDecrement {
+		return client.PN_COUNTER
+	}
+	return client.P_COUNTER
+}
+
 func getNumericFromBytes[T Incrementable](b []byte) (T, error) {
 	var val T
 	err := cbor.Unmarshal(b, &val)
diff --git a/core/crdt/errors.go b/core/crdt/errors.go
index e1148d1044..75af579850 100644
--- a/core/crdt/errors.go
+++ b/core/crdt/errors.go
@@ -17,6 +17,7 @@ import (
 const (
 	errFailedToGetPriority string = "failed to get priority"
 	errFailedToStoreValue string = "failed to store value"
+	errNegativeValue string = "value cannot be negative"
 )

 // Errors returnable from this package.
@@ -26,6 +27,7 @@ const (
 var (
 	ErrFailedToGetPriority = errors.New(errFailedToGetPriority)
 	ErrFailedToStoreValue = errors.New(errFailedToStoreValue)
+	ErrNegativeValue = errors.New(errNegativeValue)
 	ErrEncodingPriority = errors.New("error encoding priority")
 	ErrDecodingPriority = errors.New("error decoding priority")
 	// ErrMismatchedMergeType - Trying to merge two ReplicatedData of different types
@@ -41,3 +43,7 @@ func NewErrFailedToGetPriority(inner error) error {
 func NewErrFailedToStoreValue(inner error) error {
 	return errors.Wrap(errFailedToStoreValue, inner)
 }
+
+func NewErrNegativeValue[T Incrementable](value T) error {
+	return errors.New(errNegativeValue, errors.NewKV("Value", value))
+}
diff --git a/core/encoding.go b/core/encoding.go
index 40e74915b8..eab401c7a2 100644
--- a/core/encoding.go
+++ b/core/encoding.go
@@ -17,7 +17,6 @@ import (
 	"github.com/sourcenetwork/immutable"

 	"github.com/sourcenetwork/defradb/client"
-	"github.com/sourcenetwork/defradb/client/request"
 	"github.com/sourcenetwork/defradb/encoding"
 )

@@ -245,16 +244,18 @@ func DecodeIndexDataStoreKey(
 		i := len(key.Fields)
 		descending := false
+		var kind client.FieldKind = client.FieldKind_DocID
 		// If the key has more values encoded than fields on the index description, the last
 		// value must be the docID and we treat it as a string.
 		if i < len(indexDesc.Fields) {
 			descending = indexDesc.Fields[i].Descending
+			kind = fields[i].Kind
 		} else if i > len(indexDesc.Fields) {
 			return IndexDataStoreKey{}, ErrInvalidKey
 		}

-		var val any
-		data, val, err = encoding.DecodeFieldValue(data, descending)
+		var val client.NormalValue
+		data, val, err = encoding.DecodeFieldValue(data, descending, kind)
 		if err != nil {
 			return IndexDataStoreKey{}, err
 		}
@@ -262,34 +263,7 @@
 		key.Fields = append(key.Fields, IndexedField{Value: val, Descending: descending})
 	}

-	err = normalizeIndexDataStoreKeyValues(&key, fields)
-	return key, err
-}
-
-// normalizeIndexDataStoreKeyValues converts all field values to standardized
-// Defra Go type according to fields description.
-func normalizeIndexDataStoreKeyValues(key *IndexDataStoreKey, fields []client.FieldDefinition) error { - for i := range key.Fields { - if key.Fields[i].Value == nil { - continue - } - var err error - var val any - if i == len(key.Fields)-1 && len(key.Fields)-len(fields) == 1 { - bytes, ok := key.Fields[i].Value.([]byte) - if !ok { - return client.NewErrUnexpectedType[[]byte](request.DocIDArgName, key.Fields[i].Value) - } - val = string(bytes) - } else { - val, err = NormalizeFieldValue(fields[i], key.Fields[i].Value) - } - if err != nil { - return err - } - key.Fields[i].Value = val - } - return nil + return key, nil } // EncodeIndexDataStoreKey encodes a IndexDataStoreKey to bytes to be stored as a key diff --git a/core/errors.go b/core/errors.go index 440e5778ac..d9ae72e0c4 100644 --- a/core/errors.go +++ b/core/errors.go @@ -17,6 +17,7 @@ import ( const ( errFailedToGetFieldIdOfKey string = "failed to get FieldID of Key" errInvalidFieldIndex string = "invalid field index" + errInvalidFieldValue string = "invalid field value" ) var ( @@ -24,6 +25,7 @@ var ( ErrEmptyKey = errors.New("received empty key string") ErrInvalidKey = errors.New("invalid key string") ErrInvalidFieldIndex = errors.New(errInvalidFieldIndex) + ErrInvalidFieldValue = errors.New(errInvalidFieldValue) ) // NewErrFailedToGetFieldIdOfKey returns the error indicating failure to get FieldID of Key. @@ -35,3 +37,8 @@ func NewErrFailedToGetFieldIdOfKey(inner error) error { func NewErrInvalidFieldIndex(i int) error { return errors.New(errInvalidFieldIndex, errors.NewKV("index", i)) } + +// NewErrInvalidFieldValue returns the error indicating invalid field value. +func NewErrInvalidFieldValue(reason string) error { + return errors.New(errInvalidFieldValue, errors.NewKV("Reason", reason)) +} diff --git a/core/key.go b/core/key.go index 4017d445b0..69b19efb6e 100644 --- a/core/key.go +++ b/core/key.go @@ -43,7 +43,8 @@ const ( ) const ( - COLLECTION = "/collection/id" + COLLECTION = "collection" + COLLECTION_ID = "/collection/id" COLLECTION_NAME = "/collection/name" COLLECTION_SCHEMA_VERSION = "/collection/version" COLLECTION_INDEX = "/collection/index" @@ -79,7 +80,7 @@ var _ Key = (*DataStoreKey)(nil) // value of a field in an index. type IndexedField struct { // Value is the value of the field in the index - Value any + Value client.NormalValue // Descending is true if the field is sorted in descending order Descending bool } @@ -326,7 +327,7 @@ func NewCollectionIndexKey(colID immutable.Option[uint32], indexName string) Col // Where [IndexName] might be omitted. Anything else will return an error. 
func NewCollectionIndexKeyFromString(key string) (CollectionIndexKey, error) { keyArr := strings.Split(key, "/") - if len(keyArr) < 4 || len(keyArr) > 5 || keyArr[1] != "collection" || keyArr[2] != "index" { + if len(keyArr) < 4 || len(keyArr) > 5 || keyArr[1] != COLLECTION || keyArr[2] != "index" { return CollectionIndexKey{}, ErrInvalidKey } @@ -564,7 +565,7 @@ func (k PrimaryDataStoreKey) ToString() string { } func (k CollectionKey) ToString() string { - return fmt.Sprintf("%s/%s", COLLECTION, strconv.Itoa(int(k.CollectionID))) + return fmt.Sprintf("%s/%s", COLLECTION_ID, strconv.Itoa(int(k.CollectionID))) } func (k CollectionKey) Bytes() []byte { diff --git a/core/key_test.go b/core/key_test.go index 3fa7f41a63..7791075a17 100644 --- a/core/key_test.go +++ b/core/key_test.go @@ -220,26 +220,26 @@ func TestIndexDatastoreKey_Bytes(t *testing.T) { Name: "collection, index and one field", CollectionID: 1, IndexID: 2, - Fields: []IndexedField{{Value: 5}}, + Fields: []IndexedField{{Value: client.NewNormalInt(5)}}, Expected: encodeKey(1, 2, 5, false), }, { Name: "collection, index and two fields", CollectionID: 1, IndexID: 2, - Fields: []IndexedField{{Value: 5}, {Value: 7}}, + Fields: []IndexedField{{Value: client.NewNormalInt(5)}, {Value: client.NewNormalInt(7)}}, Expected: encodeKey(1, 2, 5, false, 7, false), }, { Name: "no index", CollectionID: 1, - Fields: []IndexedField{{Value: 5}}, + Fields: []IndexedField{{Value: client.NewNormalInt(5)}}, Expected: encoding.EncodeUvarintAscending([]byte{'/'}, 1), }, { Name: "no collection", IndexID: 2, - Fields: []IndexedField{{Value: 5}}, + Fields: []IndexedField{{Value: client.NewNormalInt(5)}}, Expected: []byte{}, }, } @@ -255,12 +255,12 @@ func TestIndexDatastoreKey_Bytes(t *testing.T) { } func TestIndexDatastoreKey_ToString(t *testing.T) { - key := NewIndexDataStoreKey(1, 2, []IndexedField{{Value: 5}}) + key := NewIndexDataStoreKey(1, 2, []IndexedField{{Value: client.NewNormalInt(5)}}) assert.Equal(t, key.ToString(), string(encodeKey(1, 2, 5, false))) } func TestIndexDatastoreKey_ToDS(t *testing.T) { - key := NewIndexDataStoreKey(1, 2, []IndexedField{{Value: 5}}) + key := NewIndexDataStoreKey(1, 2, []IndexedField{{Value: client.NewNormalInt(5)}}) assert.Equal(t, key.ToDS(), ds.NewKey(string(encodeKey(1, 2, 5, false)))) } @@ -288,7 +288,7 @@ func TestDecodeIndexDataStoreKey(t *testing.T) { Fields: []client.IndexedFieldDescription{{}}, }, inputBytes: encodeKey(colID, indexID, 5, false), - expectedFields: []IndexedField{{Value: int64(5)}}, + expectedFields: []IndexedField{{Value: client.NewNormalInt(5)}}, }, { name: "two fields (one descending)", @@ -296,8 +296,11 @@ func TestDecodeIndexDataStoreKey(t *testing.T) { ID: indexID, Fields: []client.IndexedFieldDescription{{}, {Descending: true}}, }, - inputBytes: encodeKey(colID, indexID, 5, false, 7, true), - expectedFields: []IndexedField{{Value: int64(5)}, {Value: int64(7), Descending: true}}, + inputBytes: encodeKey(colID, indexID, 5, false, 7, true), + expectedFields: []IndexedField{ + {Value: client.NewNormalInt(5)}, + {Value: client.NewNormalInt(7), Descending: true}, + }, }, { name: "last encoded value without matching field description is docID", @@ -305,9 +308,12 @@ func TestDecodeIndexDataStoreKey(t *testing.T) { ID: indexID, Fields: []client.IndexedFieldDescription{{}}, }, - inputBytes: encoding.EncodeStringAscending(append(encodeKey(1, indexID, 5, false), '/'), "docID"), - expectedFields: []IndexedField{{Value: int64(5)}, {Value: "docID"}}, - fieldKinds: 
[]client.FieldKind{client.FieldKind_NILLABLE_INT},
+			inputBytes: encoding.EncodeStringAscending(append(encodeKey(1, indexID, 5, false), '/'), "docID"),
+			expectedFields: []IndexedField{
+				{Value: client.NewNormalInt(5)},
+				{Value: client.NewNormalString("docID")},
+			},
+			fieldKinds: []client.FieldKind{client.FieldKind_NILLABLE_INT},
 		},
 	}

@@ -384,11 +390,6 @@ func TestDecodeIndexDataStoreKey_InvalidKey(t *testing.T) {
 			val: encodeKey(colID, indexID, 5, false, 7, false, 9, false),
 			numFields: 2,
 		},
-		{
-			name: "invalid docID value",
-			val: encoding.EncodeUvarintAscending(append(encodeKey(colID, indexID, 5, false), '/'), 5),
-			numFields: 1,
-		},
 	}
 	indexDesc := client.IndexDescription{ID: indexID, Fields: []client.IndexedFieldDescription{{}}}
 	for _, c := range cases {
diff --git a/core/parser.go b/core/parser.go
index 05a90d0526..619f3fd1c2 100644
--- a/core/parser.go
+++ b/core/parser.go
@@ -51,6 +51,10 @@ type Parser interface {
 	NewFilterFromString(collectionType string, body string) (immutable.Option[request.Filter], error)

 	// ParseSDL parses an SDL string into a set of collection descriptions.
+	//
+	// The parsing should validate the syntax, but not whether what that syntax expresses
+	// is valid, i.e. we don't want the parser to make remote calls to verify that the
+	// policy description is valid (that is the caller's responsibility).
 	ParseSDL(ctx context.Context, schemaString string) ([]client.CollectionDefinition, error)

 	// Adds the given schema to this parser's model.
diff --git a/datastore/blockstore.go b/datastore/blockstore.go
index 8525f8410e..be25894a3d 100644
--- a/datastore/blockstore.go
+++ b/datastore/blockstore.go
@@ -64,7 +64,6 @@ func (bs *bstore) HashOnRead(enabled bool) {
 // Get returns a block from the blockstore.
 func (bs *bstore) Get(ctx context.Context, k cid.Cid) (blocks.Block, error) {
 	if !k.Defined() {
-		log.Error(ctx, "Undefined CID in blockstore")
 		return nil, ipld.ErrNotFound{Cid: k}
 	}
 	bdata, err := bs.store.Get(ctx, dshelp.MultihashToDsKey(k.Hash()))
@@ -164,13 +163,13 @@ func (bs *bstore) AllKeysChan(ctx context.Context) (<-chan cid.Cid, error) {
 				return
 			}
 			if e.Error != nil {
-				log.ErrorE(ctx, "Blockstore.AllKeysChan errored", e.Error)
+				log.ErrorContextE(ctx, "Blockstore.AllKeysChan errored", e.Error)
 				return
 			}

 			hash, err := dshelp.DsKeyToMultihash(ds.RawKey(e.Key))
 			if err != nil {
-				log.ErrorE(ctx, "Error parsing key from binary", err)
+				log.ErrorContextE(ctx, "Error parsing key from binary", err)
 				continue
 			}
 			k := cid.NewCidV1(cid.Raw, hash)
diff --git a/datastore/store.go b/datastore/store.go
index 759eef01db..7f2764a65d 100644
--- a/datastore/store.go
+++ b/datastore/store.go
@@ -14,12 +14,13 @@ import (
 	blockstore "github.com/ipfs/boxo/blockstore"
 	ds "github.com/ipfs/go-datastore"

+	"github.com/sourcenetwork/corelog"
+
 	"github.com/sourcenetwork/defradb/datastore/iterable"
-	"github.com/sourcenetwork/defradb/logging"
 )

 var (
-	log = logging.MustNewLogger("store")
+	log = corelog.NewLogger("store")
 )

 // RootStore wraps Batching and TxnDatastore requiring datastore to support both batching and transactions.
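// ============================================================================
// [Editor's note — illustrative sketch, not part of this diff] The two hunks
// above replace the old defradb/logging package with corelog. A minimal view
// of the migrated call pattern, assuming only the constructor and helper that
// actually appear in this diff (corelog.NewLogger and ErrorContextE); the
// error value here is illustrative.
// ============================================================================
package main

import (
	"context"
	"errors"

	"github.com/sourcenetwork/corelog"
)

// A named logger, constructed the same way as in datastore/store.go above.
var log = corelog.NewLogger("store")

func main() {
	ctx := context.Background()
	// ErrorContextE supersedes the old ErrorE helper and threads the
	// request context through to the logger.
	log.ErrorContextE(ctx, "Blockstore.AllKeysChan errored", errors.New("example failure"))
}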
diff --git a/db/backup.go b/db/backup.go index d47b3534e1..1353376f34 100644 --- a/db/backup.go +++ b/db/backup.go @@ -19,10 +19,9 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" - "github.com/sourcenetwork/defradb/datastore" ) -func (db *db) basicImport(ctx context.Context, txn datastore.Txn, filepath string) (err error) { +func (db *db) basicImport(ctx context.Context, filepath string) (err error) { f, err := os.Open(filepath) if err != nil { return NewErrOpenFile(err, filepath) @@ -49,7 +48,7 @@ func (db *db) basicImport(ctx context.Context, txn datastore.Txn, filepath strin return err } colName := t.(string) - col, err := db.getCollectionByName(ctx, txn, colName) + col, err := db.getCollectionByName(ctx, colName) if err != nil { return NewErrFailedToGetCollection(colName, err) } @@ -72,7 +71,7 @@ func (db *db) basicImport(ctx context.Context, txn datastore.Txn, filepath strin // check if self referencing and remove from docMap for key creation resetMap := map[string]any{} for _, field := range col.Schema().Fields { - if field.Kind == client.FieldKind_FOREIGN_OBJECT { + if field.Kind.IsObject() && !field.Kind.IsArray() { if val, ok := docMap[field.Name+request.RelatedObjectID]; ok { if docMap[request.NewDocIDFieldName] == val { resetMap[field.Name+request.RelatedObjectID] = val @@ -85,12 +84,12 @@ func (db *db) basicImport(ctx context.Context, txn datastore.Txn, filepath strin delete(docMap, request.DocIDFieldName) delete(docMap, request.NewDocIDFieldName) - doc, err := client.NewDocFromMap(docMap, col.Schema()) + doc, err := client.NewDocFromMap(docMap, col.Definition()) if err != nil { return NewErrDocFromMap(err) } - err = col.WithTxn(txn).Create(ctx, doc) + err = col.Create(ctx, doc) if err != nil { return NewErrDocCreate(err) } @@ -101,7 +100,7 @@ func (db *db) basicImport(ctx context.Context, txn datastore.Txn, filepath strin if err != nil { return NewErrDocUpdate(err) } - err = col.WithTxn(txn).Update(ctx, doc) + err = col.Update(ctx, doc) if err != nil { return NewErrDocUpdate(err) } @@ -116,19 +115,19 @@ func (db *db) basicImport(ctx context.Context, txn datastore.Txn, filepath strin return nil } -func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client.BackupConfig) (err error) { +func (db *db) basicExport(ctx context.Context, config *client.BackupConfig) (err error) { // old key -> new Key keyChangeCache := map[string]string{} cols := []client.Collection{} if len(config.Collections) == 0 { - cols, err = db.getCollections(ctx, txn, client.CollectionFetchOptions{}) + cols, err = db.getCollections(ctx, client.CollectionFetchOptions{}) if err != nil { return NewErrFailedToGetAllCollections(err) } } else { for _, colName := range config.Collections { - col, err := db.getCollectionByName(ctx, txn, colName) + col, err := db.getCollectionByName(ctx, colName) if err != nil { return NewErrFailedToGetCollection(colName, err) } @@ -188,8 +187,7 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client if err != nil { return err } - colTxn := col.WithTxn(txn) - docIDsCh, err := colTxn.GetAllDocIDs(ctx) + docIDsCh, err := col.GetAllDocIDs(ctx) if err != nil { return err } @@ -205,7 +203,7 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client return err } } - doc, err := colTxn.Get(ctx, docResultWithID.ID, false) + doc, err := col.Get(ctx, docResultWithID.ID, false) if err != nil { return err } @@ -214,9 +212,8 @@ func (db *db) basicExport(ctx 
context.Context, txn datastore.Txn, config *client refFieldName := "" // replace any foreign key if it needs to be changed for _, field := range col.Schema().Fields { - switch field.Kind { - case client.FieldKind_FOREIGN_OBJECT: - if _, ok := colNameCache[field.Schema]; !ok { + if field.Kind.IsObject() && !field.Kind.IsArray() { + if _, ok := colNameCache[field.Kind.Underlying()]; !ok { continue } if foreignKey, err := doc.Get(field.Name + request.RelatedObjectID); err == nil { @@ -230,9 +227,9 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client refFieldName = field.Name + request.RelatedObjectID } } else { - foreignCol, err := db.getCollectionByName(ctx, txn, field.Schema) + foreignCol, err := db.getCollectionByName(ctx, field.Kind.Underlying()) if err != nil { - return NewErrFailedToGetCollection(field.Schema, err) + return NewErrFailedToGetCollection(field.Kind.Underlying(), err) } foreignDocID, err := client.NewDocIDFromString(foreignKey.(string)) if err != nil { @@ -260,7 +257,7 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client refFieldName = field.Name + request.RelatedObjectID } - newForeignDoc, err := client.NewDocFromMap(oldForeignDoc, foreignCol.Schema()) + newForeignDoc, err := client.NewDocFromMap(oldForeignDoc, foreignCol.Definition()) if err != nil { return err } @@ -291,7 +288,7 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client delete(docM, refFieldName) } - newDoc, err := client.NewDocFromMap(docM, col.Schema()) + newDoc, err := client.NewDocFromMap(docM, col.Definition()) if err != nil { return err } diff --git a/db/backup_test.go b/db/backup_test.go index 093b1a1a3f..486080db81 100644 --- a/db/backup_test.go +++ b/db/backup_test.go @@ -18,6 +18,7 @@ import ( "github.com/stretchr/testify/require" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" ) @@ -40,10 +41,10 @@ func TestBasicExport_WithNormalFormatting_NoError(t *testing.T) { col1, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col1.Schema()) + doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col1.Definition()) require.NoError(t, err) - doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Schema()) + doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Definition()) require.NoError(t, err) err = col1.Create(ctx, doc1) @@ -55,7 +56,7 @@ func TestBasicExport_WithNormalFormatting_NoError(t *testing.T) { col2, err := db.GetCollectionByName(ctx, "Address") require.NoError(t, err) - doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Schema()) + doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Definition()) require.NoError(t, err) err = col2.Create(ctx, doc3) @@ -65,8 +66,11 @@ func TestBasicExport_WithNormalFormatting_NoError(t *testing.T) { require.NoError(t, err) defer txn.Discard(ctx) + ctx = SetContextIdentity(ctx, acpIdentity.None) + ctx = SetContextTxn(ctx, txn) + filepath := t.TempDir() + "/test.json" - err = db.basicExport(ctx, txn, &client.BackupConfig{Filepath: filepath}) + err = db.basicExport(ctx, &client.BackupConfig{Filepath: filepath}) require.NoError(t, err) b, err := os.ReadFile(filepath) @@ -102,10 +106,10 @@ func TestBasicExport_WithPrettyFormatting_NoError(t *testing.T) { col1, err := 
db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col1.Schema()) + doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col1.Definition()) require.NoError(t, err) - doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Schema()) + doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Definition()) require.NoError(t, err) err = col1.Create(ctx, doc1) @@ -117,7 +121,7 @@ func TestBasicExport_WithPrettyFormatting_NoError(t *testing.T) { col2, err := db.GetCollectionByName(ctx, "Address") require.NoError(t, err) - doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Schema()) + doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Definition()) require.NoError(t, err) err = col2.Create(ctx, doc3) @@ -127,8 +131,11 @@ func TestBasicExport_WithPrettyFormatting_NoError(t *testing.T) { require.NoError(t, err) defer txn.Discard(ctx) + ctx = SetContextIdentity(ctx, acpIdentity.None) + ctx = SetContextTxn(ctx, txn) + filepath := t.TempDir() + "/test.json" - err = db.basicExport(ctx, txn, &client.BackupConfig{Filepath: filepath, Pretty: true}) + err = db.basicExport(ctx, &client.BackupConfig{Filepath: filepath, Pretty: true}) require.NoError(t, err) b, err := os.ReadFile(filepath) @@ -164,10 +171,10 @@ func TestBasicExport_WithSingleCollection_NoError(t *testing.T) { col1, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col1.Schema()) + doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col1.Definition()) require.NoError(t, err) - doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Schema()) + doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Definition()) require.NoError(t, err) err = col1.Create(ctx, doc1) @@ -179,7 +186,7 @@ func TestBasicExport_WithSingleCollection_NoError(t *testing.T) { col2, err := db.GetCollectionByName(ctx, "Address") require.NoError(t, err) - doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Schema()) + doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Definition()) require.NoError(t, err) err = col2.Create(ctx, doc3) @@ -189,8 +196,11 @@ func TestBasicExport_WithSingleCollection_NoError(t *testing.T) { require.NoError(t, err) defer txn.Discard(ctx) + ctx = SetContextIdentity(ctx, acpIdentity.None) + ctx = SetContextTxn(ctx, txn) + filepath := t.TempDir() + "/test.json" - err = db.basicExport(ctx, txn, &client.BackupConfig{Filepath: filepath, Collections: []string{"Address"}}) + err = db.basicExport(ctx, &client.BackupConfig{Filepath: filepath, Collections: []string{"Address"}}) require.NoError(t, err) b, err := os.ReadFile(filepath) @@ -227,10 +237,10 @@ func TestBasicExport_WithMultipleCollectionsAndUpdate_NoError(t *testing.T) { col1, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col1.Schema()) + doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col1.Definition()) require.NoError(t, err) - doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 31}`), col1.Schema()) + doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 31}`), 
col1.Definition()) require.NoError(t, err) err = col1.Create(ctx, doc1) @@ -242,10 +252,10 @@ func TestBasicExport_WithMultipleCollectionsAndUpdate_NoError(t *testing.T) { col2, err := db.GetCollectionByName(ctx, "Book") require.NoError(t, err) - doc3, err := client.NewDocFromJSON([]byte(`{"name": "John and the sourcerers' stone", "author": "bae-e933420a-988a-56f8-8952-6c245aebd519"}`), col2.Schema()) + doc3, err := client.NewDocFromJSON([]byte(`{"name": "John and the sourcerers' stone", "author": "bae-e933420a-988a-56f8-8952-6c245aebd519"}`), col2.Definition()) require.NoError(t, err) - doc4, err := client.NewDocFromJSON([]byte(`{"name": "Game of chains", "author": "bae-e933420a-988a-56f8-8952-6c245aebd519"}`), col2.Schema()) + doc4, err := client.NewDocFromJSON([]byte(`{"name": "Game of chains", "author": "bae-e933420a-988a-56f8-8952-6c245aebd519"}`), col2.Definition()) require.NoError(t, err) err = col2.Create(ctx, doc3) @@ -263,8 +273,11 @@ func TestBasicExport_WithMultipleCollectionsAndUpdate_NoError(t *testing.T) { require.NoError(t, err) defer txn.Discard(ctx) + ctx = SetContextIdentity(ctx, acpIdentity.None) + ctx = SetContextTxn(ctx, txn) + filepath := t.TempDir() + "/test.json" - err = db.basicExport(ctx, txn, &client.BackupConfig{Filepath: filepath}) + err = db.basicExport(ctx, &client.BackupConfig{Filepath: filepath}) require.NoError(t, err) b, err := os.ReadFile(filepath) @@ -300,10 +313,10 @@ func TestBasicExport_EnsureFileOverwrite_NoError(t *testing.T) { col1, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col1.Schema()) + doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col1.Definition()) require.NoError(t, err) - doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Schema()) + doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Definition()) require.NoError(t, err) err = col1.Create(ctx, doc1) @@ -315,7 +328,7 @@ func TestBasicExport_EnsureFileOverwrite_NoError(t *testing.T) { col2, err := db.GetCollectionByName(ctx, "Address") require.NoError(t, err) - doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Schema()) + doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Definition()) require.NoError(t, err) err = col2.Create(ctx, doc3) @@ -325,6 +338,9 @@ func TestBasicExport_EnsureFileOverwrite_NoError(t *testing.T) { require.NoError(t, err) defer txn.Discard(ctx) + ctx = SetContextIdentity(ctx, acpIdentity.None) + ctx = SetContextTxn(ctx, txn) + filepath := t.TempDir() + "/test.json" err = os.WriteFile( @@ -334,7 +350,7 @@ func TestBasicExport_EnsureFileOverwrite_NoError(t *testing.T) { ) require.NoError(t, err) - err = db.basicExport(ctx, txn, &client.BackupConfig{Filepath: filepath, Collections: []string{"Address"}}) + err = db.basicExport(ctx, &client.BackupConfig{Filepath: filepath, Collections: []string{"Address"}}) require.NoError(t, err) b, err := os.ReadFile(filepath) @@ -370,6 +386,9 @@ func TestBasicImport_WithMultipleCollectionsAndObjects_NoError(t *testing.T) { txn, err := db.NewTxn(ctx, false) require.NoError(t, err) + ctx = SetContextIdentity(ctx, acpIdentity.None) + ctx = SetContextTxn(ctx, txn) + filepath := t.TempDir() + "/test.json" err = os.WriteFile( @@ -379,7 +398,7 @@ func TestBasicImport_WithMultipleCollectionsAndObjects_NoError(t *testing.T) { ) require.NoError(t, err) - err = 
db.basicImport(ctx, txn, filepath) + err = db.basicImport(ctx, filepath) require.NoError(t, err) err = txn.Commit(ctx) require.NoError(t, err) @@ -387,7 +406,10 @@ func TestBasicImport_WithMultipleCollectionsAndObjects_NoError(t *testing.T) { txn, err = db.NewTxn(ctx, true) require.NoError(t, err) - col1, err := db.getCollectionByName(ctx, txn, "Address") + ctx = SetContextIdentity(ctx, acpIdentity.None) + ctx = SetContextTxn(ctx, txn) + + col1, err := db.getCollectionByName(ctx, "Address") require.NoError(t, err) key1, err := client.NewDocIDFromString("bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f") @@ -395,7 +417,7 @@ func TestBasicImport_WithMultipleCollectionsAndObjects_NoError(t *testing.T) { _, err = col1.Get(ctx, key1, false) require.NoError(t, err) - col2, err := db.getCollectionByName(ctx, txn, "User") + col2, err := db.getCollectionByName(ctx, "User") require.NoError(t, err) key2, err := client.NewDocIDFromString("bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df") @@ -428,6 +450,7 @@ func TestBasicImport_WithJSONArray_ReturnError(t *testing.T) { txn, err := db.NewTxn(ctx, false) require.NoError(t, err) + ctx = SetContextTxn(ctx, txn) filepath := t.TempDir() + "/test.json" @@ -438,7 +461,7 @@ func TestBasicImport_WithJSONArray_ReturnError(t *testing.T) { ) require.NoError(t, err) - err = db.basicImport(ctx, txn, filepath) + err = db.basicImport(ctx, filepath) require.ErrorIs(t, err, ErrExpectedJSONObject) err = txn.Commit(ctx) require.NoError(t, err) @@ -463,6 +486,7 @@ func TestBasicImport_WithObjectCollection_ReturnError(t *testing.T) { txn, err := db.NewTxn(ctx, false) require.NoError(t, err) + ctx = SetContextTxn(ctx, txn) filepath := t.TempDir() + "/test.json" @@ -473,7 +497,7 @@ func TestBasicImport_WithObjectCollection_ReturnError(t *testing.T) { ) require.NoError(t, err) - err = db.basicImport(ctx, txn, filepath) + err = db.basicImport(ctx, filepath) require.ErrorIs(t, err, ErrExpectedJSONArray) err = txn.Commit(ctx) require.NoError(t, err) @@ -498,6 +522,7 @@ func TestBasicImport_WithInvalidFilepath_ReturnError(t *testing.T) { txn, err := db.NewTxn(ctx, false) require.NoError(t, err) + ctx = SetContextTxn(ctx, txn) filepath := t.TempDir() + "/test.json" @@ -509,7 +534,7 @@ func TestBasicImport_WithInvalidFilepath_ReturnError(t *testing.T) { require.NoError(t, err) wrongFilepath := t.TempDir() + "/some/test.json" - err = db.basicImport(ctx, txn, wrongFilepath) + err = db.basicImport(ctx, wrongFilepath) require.ErrorIs(t, err, os.ErrNotExist) err = txn.Commit(ctx) require.NoError(t, err) @@ -534,6 +559,7 @@ func TestBasicImport_WithInvalidCollection_ReturnError(t *testing.T) { txn, err := db.NewTxn(ctx, false) require.NoError(t, err) + ctx = SetContextTxn(ctx, txn) filepath := t.TempDir() + "/test.json" @@ -544,7 +570,7 @@ func TestBasicImport_WithInvalidCollection_ReturnError(t *testing.T) { ) require.NoError(t, err) - err = db.basicImport(ctx, txn, filepath) + err = db.basicImport(ctx, filepath) require.ErrorIs(t, err, ErrFailedToGetCollection) err = txn.Commit(ctx) require.NoError(t, err) diff --git a/db/base/collection_keys.go b/db/base/collection_keys.go index 1277b96a81..98584454ab 100644 --- a/db/base/collection_keys.go +++ b/db/base/collection_keys.go @@ -47,7 +47,7 @@ func MakePrimaryIndexKeyForCRDT( WithInstanceInfo(key). 
WithFieldId(core.COMPOSITE_NAMESPACE), nil - case client.LWW_REGISTER, client.PN_COUNTER: + case client.LWW_REGISTER, client.PN_COUNTER, client.P_COUNTER: field, ok := c.GetFieldByName(fieldName) if !ok { return core.DataStoreKey{}, client.NewErrFieldNotExist(fieldName) diff --git a/db/collection.go b/db/collection.go index c9d311f01a..e84530d3e7 100644 --- a/db/collection.go +++ b/db/collection.go @@ -13,10 +13,13 @@ package db import ( "bytes" "context" + "encoding/json" "fmt" + "reflect" "strconv" "strings" + jsonpatch "github.com/evanphx/json-patch/v5" "github.com/ipfs/go-cid" ds "github.com/ipfs/go-datastore" "github.com/ipfs/go-datastore/query" @@ -24,10 +27,10 @@ import ( "github.com/lens-vm/lens/host-go/config/model" "github.com/sourcenetwork/immutable" + "github.com/sourcenetwork/defradb/acp" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" "github.com/sourcenetwork/defradb/core" - "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/db/base" "github.com/sourcenetwork/defradb/db/description" "github.com/sourcenetwork/defradb/db/fetcher" @@ -42,18 +45,8 @@ var _ client.Collection = (*collection)(nil) // collection stores data records at Documents, which are gathered // together under a collection name. This is analogous to SQL Tables. type collection struct { - db *db - - // txn represents any externally provided [datastore.Txn] for which any - // operation on this [collection] instance should be scoped to. - // - // If this has no value, operations requiring a transaction should use an - // implicit internally managed transaction, which only lives for duration - // of the operation in question. - txn immutable.Option[datastore.Txn] - - def client.CollectionDefinition - + db *db + def client.CollectionDefinition indexes []CollectionIndex fetcherFactory func() fetcher.Fetcher } @@ -91,11 +84,12 @@ func (c *collection) newFetcher() fetcher.Fetcher { // Note: Collection.ID is an auto-incrementing value that is generated by the database. 
 func (db *db) createCollection(
 	ctx context.Context,
-	txn datastore.Txn,
 	def client.CollectionDefinition,
+	newDefinitions []client.CollectionDefinition,
 ) (client.Collection, error) {
 	schema := def.Schema
 	desc := def.Description
+	txn := mustGetContextTxn(ctx)

 	if desc.Name.HasValue() {
 		exists, err := description.HasCollectionByName(ctx, txn, desc.Name.Value())
@@ -107,16 +101,46 @@ func (db *db) createCollection(
 		}
 	}

-	colSeq, err := db.getSequence(ctx, txn, core.CollectionIDSequenceKey{})
+	existingDefinitions, err := db.getAllActiveDefinitions(ctx)
+	if err != nil {
+		return nil, err
+	}
+
+	schemaByName := map[string]client.SchemaDescription{}
+	for _, existingDefinition := range existingDefinitions {
+		schemaByName[existingDefinition.Schema.Name] = existingDefinition.Schema
+	}
+	for _, newDefinition := range newDefinitions {
+		schemaByName[newDefinition.Schema.Name] = newDefinition.Schema
+	}
+
+	_, err = validateUpdateSchemaFields(schemaByName, client.SchemaDescription{}, schema)
+	if err != nil {
+		return nil, err
+	}
+
+	definitionsByName := map[string]client.CollectionDefinition{}
+	for _, existingDefinition := range existingDefinitions {
+		definitionsByName[existingDefinition.GetName()] = existingDefinition
+	}
+	for _, newDefinition := range newDefinitions {
+		definitionsByName[newDefinition.GetName()] = newDefinition
+	}
+	err = db.validateNewCollection(def, definitionsByName)
+	if err != nil {
+		return nil, err
+	}
+
+	colSeq, err := db.getSequence(ctx, core.CollectionIDSequenceKey{})
 	if err != nil {
 		return nil, err
 	}
-	colID, err := colSeq.next(ctx, txn)
+	colID, err := colSeq.next(ctx)
 	if err != nil {
 		return nil, err
 	}

-	fieldSeq, err := db.getSequence(ctx, txn, core.NewFieldIDSequenceKey(uint32(colID)))
+	fieldSeq, err := db.getSequence(ctx, core.NewFieldIDSequenceKey(uint32(colID)))
 	if err != nil {
 		return nil, err
 	}
@@ -129,27 +153,26 @@ func (db *db) createCollection(
 		return nil, err
 	}
 	desc.SchemaVersionID = schema.VersionID
-	for _, globalField := range schema.Fields {
+	for _, localField := range desc.Fields {
 		var fieldID uint64
-		if globalField.Name == request.DocIDFieldName {
+		if localField.Name == request.DocIDFieldName {
 			// There is no hard technical requirement for this, we just think it looks nicer
 			// if the doc id is at the zero index. It makes it look a little nicer in commit
 			// queries too.
 			fieldID = 0
 		} else {
-			fieldID, err = fieldSeq.next(ctx, txn)
+			fieldID, err = fieldSeq.next(ctx)
 			if err != nil {
 				return nil, err
 			}
 		}

-		desc.Fields = append(
-			desc.Fields,
-			client.CollectionFieldDescription{
-				Name: globalField.Name,
-				ID: client.FieldID(fieldID),
-			},
-		)
+		for i := range desc.Fields {
+			if desc.Fields[i].Name == localField.Name {
+				desc.Fields[i].ID = client.FieldID(fieldID)
+				break
+			}
+		}
 	}

 	desc, err = description.SaveCollection(ctx, txn, desc)
@@ -158,13 +181,45 @@ func (db *db) createCollection(
 	}

 	col := db.newCollection(desc, schema)
+
 	for _, index := range desc.Indexes {
-		if _, err := col.createIndex(ctx, txn, index); err != nil {
+		if _, err := col.createIndex(ctx, index); err != nil {
 			return nil, err
 		}
 	}

-	return db.getCollectionByID(ctx, txn, desc.ID)
+	return db.getCollectionByID(ctx, desc.ID)
+}
+
+// validateCollectionDefinitionPolicyDesc validates that the policy definition is valid, beyond syntax.
+//
+// Ensures that the information within the policy definition makes sense; this
+// function might also make relevant remote calls using the acp system.
+func (db *db) validateCollectionDefinitionPolicyDesc(
+	ctx context.Context,
+	policyDesc immutable.Option[client.PolicyDescription],
+) error {
+	if !policyDesc.HasValue() {
+		// No policy validation needed, whether acp exists or not doesn't matter.
+		return nil
+	}
+
+	// If there is a policy specified but the database does not have acp
+	// enabled/available, return an error; the database must have acp available
+	// to enable access control (in order to adhere to the specified policy).
+	if !db.acp.HasValue() {
+		return ErrCanNotHavePolicyWithoutACP
+	}
+
+	// If we have the policy specified on the collection, and acp is available/enabled,
+	// then using the acp system we need to ensure that the specified policy id actually
+	// exists as a policy, that the resource name exists on that policy, and that the
+	// resource is a valid DPI.
+	return db.acp.Value().ValidateResourceExistsOnValidDPI(
+		ctx,
+		policyDesc.Value().ID,
+		policyDesc.Value().ResourceName,
+	)
+}

 // updateSchema updates the persisted schema description matching the name of the given
@@ -177,7 +232,6 @@ func (db *db) createCollection(
 // applied.
 func (db *db) updateSchema(
 	ctx context.Context,
-	txn datastore.Txn,
 	existingSchemaByName map[string]client.SchemaDescription,
 	proposedDescriptionsByName map[string]client.SchemaDescription,
 	schema client.SchemaDescription,
@@ -198,13 +252,12 @@ func (db *db) updateSchema(
 	}

 	for _, field := range schema.Fields {
-		if field.Kind == client.FieldKind_FOREIGN_OBJECT {
+		if field.Kind.IsObject() && !field.Kind.IsArray() {
 			idFieldName := field.Name + "_id"
 			if _, ok := schema.GetFieldByName(idFieldName); !ok {
 				schema.Fields = append(schema.Fields, client.SchemaFieldDescription{
-					Name: idFieldName,
-					Kind: client.FieldKind_DocID,
-					RelationName: field.RelationName,
+					Name: idFieldName,
+					Kind: client.FieldKind_DocID,
 				})
 			}
 		}
@@ -218,6 +271,7 @@ func (db *db) updateSchema(
 		}
 	}

+	txn := mustGetContextTxn(ctx)
 	previousVersionID := schema.VersionID
 	schema, err = description.CreateSchemaVersion(ctx, txn, schema)
 	if err != nil {
@@ -233,7 +287,7 @@ func (db *db) updateSchema(
 		return err
 	}

-	colSeq, err := db.getSequence(ctx, txn, core.CollectionIDSequenceKey{})
+	colSeq, err := db.getSequence(ctx, core.CollectionIDSequenceKey{})
 	if err != nil {
 		return err
 	}
@@ -263,7 +317,7 @@ func (db *db) updateSchema(
 		existingCol.RootID = col.RootID
 	}

-	fieldSeq, err := db.getSequence(ctx, txn, core.NewFieldIDSequenceKey(existingCol.RootID))
+	fieldSeq, err := db.getSequence(ctx, core.NewFieldIDSequenceKey(existingCol.RootID))
 	if err != nil {
 		return err
 	}
@@ -276,7 +330,7 @@ func (db *db) updateSchema(
 		if ok {
 			fieldID = existingField.ID
 		} else {
-			nextFieldID, err := fieldSeq.next(ctx, txn)
+			nextFieldID, err := fieldSeq.next(ctx)
 			if err != nil {
 				return err
 			}
@@ -302,12 +356,12 @@ func (db *db) updateSchema(
 	}

 	if !isExistingCol {
-		colID, err := colSeq.next(ctx, txn)
+		colID, err := colSeq.next(ctx)
 		if err != nil {
 			return err
 		}

-		fieldSeq, err := db.getSequence(ctx, txn, core.NewFieldIDSequenceKey(col.RootID))
+		fieldSeq, err := db.getSequence(ctx, core.NewFieldIDSequenceKey(col.RootID))
 		if err != nil {
 			return err
 		}
@@ -327,7 +381,7 @@ func (db *db) updateSchema(
 		for _, globalField := range schema.Fields {
 			_, exists := col.GetFieldByName(globalField.Name)
 			if !exists {
-				fieldID, err := fieldSeq.next(ctx, txn)
+				fieldID, err := fieldSeq.next(ctx)
 				if err != nil {
 					return err
 				}
@@ -359,7 +413,7 @@ func (db *db) updateSchema(
 	if setAsActiveVersion {
 		// activate collection versions using the new schema ID.
This call must be made after // all new collection versions have been saved. - err = db.setActiveSchemaVersion(ctx, txn, schema.VersionID) + err = db.setActiveSchemaVersion(ctx, schema.VersionID) if err != nil { return err } @@ -433,65 +487,26 @@ func validateUpdateSchemaFields( // If the field is new, then the collection has changed hasChanged = hasChanged || !fieldAlreadyExists - if !fieldAlreadyExists && (proposedField.Kind == client.FieldKind_FOREIGN_OBJECT || - proposedField.Kind == client.FieldKind_FOREIGN_OBJECT_ARRAY) { - if proposedField.Schema == "" { - return false, NewErrRelationalFieldMissingSchema(proposedField.Name, proposedField.Kind) - } - - relatedDesc, relatedDescFound := descriptionsByName[proposedField.Schema] + if !fieldAlreadyExists && proposedField.Kind.IsObject() { + _, relatedDescFound := descriptionsByName[proposedField.Kind.Underlying()] if !relatedDescFound { - return false, NewErrSchemaNotFound(proposedField.Name, proposedField.Schema) + return false, NewErrFieldKindNotFound(proposedField.Name, proposedField.Kind.Underlying()) } - if proposedField.RelationName == "" { - return false, NewErrRelationalFieldMissingRelationName(proposedField.Name) - } - - if proposedField.IsPrimaryRelation { - if proposedField.Kind == client.FieldKind_FOREIGN_OBJECT_ARRAY { - return false, NewErrPrimarySideOnMany(proposedField.Name) - } - } - - if proposedField.Kind == client.FieldKind_FOREIGN_OBJECT { + if proposedField.Kind.IsObject() && !proposedField.Kind.IsArray() { idFieldName := proposedField.Name + request.RelatedObjectID idField, idFieldFound := proposedDesc.GetFieldByName(idFieldName) if idFieldFound { if idField.Kind != client.FieldKind_DocID { return false, NewErrRelationalFieldIDInvalidType(idField.Name, client.FieldKind_DocID, idField.Kind) } - - if idField.RelationName == "" { - return false, NewErrRelationalFieldMissingRelationName(idField.Name) - } } } + } - var relatedFieldFound bool - var relatedField client.SchemaFieldDescription - for _, field := range relatedDesc.Fields { - if field.RelationName == proposedField.RelationName && - field.Kind != client.FieldKind_DocID && - !(relatedDesc.Name == proposedDesc.Name && field.Name == proposedField.Name) { - relatedFieldFound = true - relatedField = field - break - } - } - - if !relatedFieldFound { - return false, client.NewErrRelationOneSided(proposedField.Name, proposedField.Schema) - } - - if !(proposedField.IsPrimaryRelation || relatedField.IsPrimaryRelation) { - return false, NewErrPrimarySideNotDefined(proposedField.RelationName) - } - - if proposedField.IsPrimaryRelation && relatedField.IsPrimaryRelation { - return false, NewErrBothSidesPrimary(proposedField.RelationName) - } + if proposedField.Kind.IsObjectArray() { + return false, NewErrSecondaryFieldOnSchema(proposedField.Name) } if _, isDuplicate := newFieldNames[proposedField.Name]; isDuplicate { @@ -526,6 +541,513 @@ func validateUpdateSchemaFields( return hasChanged, nil } +func (db *db) patchCollection( + ctx context.Context, + patchString string, +) error { + patch, err := jsonpatch.DecodePatch([]byte(patchString)) + if err != nil { + return err + } + txn := mustGetContextTxn(ctx) + cols, err := description.GetCollections(ctx, txn) + if err != nil { + return err + } + + existingColsByID := map[uint32]client.CollectionDescription{} + for _, col := range cols { + existingColsByID[col.ID] = col + } + + existingDescriptionJson, err := json.Marshal(existingColsByID) + if err != nil { + return err + } + + newDescriptionJson, err := 
patch.Apply(existingDescriptionJson) + if err != nil { + return err + } + + var newColsByID map[uint32]client.CollectionDescription + decoder := json.NewDecoder(strings.NewReader(string(newDescriptionJson))) + decoder.DisallowUnknownFields() + err = decoder.Decode(&newColsByID) + if err != nil { + return err + } + + err = db.validateCollectionChanges(existingColsByID, newColsByID) + if err != nil { + return err + } + + for _, col := range newColsByID { + _, err := description.SaveCollection(ctx, txn, col) + if err != nil { + return err + } + + existingCol, ok := existingColsByID[col.ID] + if ok { + // Clear any existing migrations in the registry, using this semi-hacky way + // to avoid adding more functions to a public interface that we wish to remove. + + for _, src := range existingCol.CollectionSources() { + if src.Transform.HasValue() { + err = db.LensRegistry().SetMigration(ctx, existingCol.ID, model.Lens{}) + if err != nil { + return err + } + } + } + for _, src := range existingCol.QuerySources() { + if src.Transform.HasValue() { + err = db.LensRegistry().SetMigration(ctx, existingCol.ID, model.Lens{}) + if err != nil { + return err + } + } + } + } + + for _, src := range col.CollectionSources() { + if src.Transform.HasValue() { + err = db.LensRegistry().SetMigration(ctx, col.ID, src.Transform.Value()) + if err != nil { + return err + } + } + } + + for _, src := range col.QuerySources() { + if src.Transform.HasValue() { + err = db.LensRegistry().SetMigration(ctx, col.ID, src.Transform.Value()) + if err != nil { + return err + } + } + } + } + + return db.loadSchema(ctx) +} + +var patchCollectionValidators = []func( + map[uint32]client.CollectionDescription, + map[uint32]client.CollectionDescription, +) error{ + validateCollectionNameUnique, + validateSingleVersionActive, + validateSourcesNotRedefined, + validateIndexesNotModified, + validateFieldsNotModified, + validatePolicyNotModified, + validateIDNotZero, + validateIDUnique, + validateIDExists, + validateRootIDNotMutated, + validateSchemaVersionIDNotMutated, + validateCollectionNotRemoved, +} + +func (db *db) validateCollectionChanges( + oldColsByID map[uint32]client.CollectionDescription, + newColsByID map[uint32]client.CollectionDescription, +) error { + for _, validators := range patchCollectionValidators { + err := validators(oldColsByID, newColsByID) + if err != nil { + return err + } + } + + return nil +} + +var newCollectionValidators = []func( + client.CollectionDefinition, + map[string]client.CollectionDefinition, +) error{ + validateSecondaryFieldsPairUp, + validateRelationPointsToValidKind, + validateSingleSidePrimary, +} + +func (db *db) validateNewCollection( + def client.CollectionDefinition, + defsByName map[string]client.CollectionDefinition, +) error { + for _, validators := range newCollectionValidators { + err := validators(def, defsByName) + if err != nil { + return err + } + } + + return nil +} + +func validateRelationPointsToValidKind( + def client.CollectionDefinition, + defsByName map[string]client.CollectionDefinition, +) error { + for _, field := range def.Description.Fields { + if !field.Kind.HasValue() { + continue + } + + if !field.Kind.Value().IsObject() { + continue + } + + underlying := field.Kind.Value().Underlying() + _, ok := defsByName[underlying] + if !ok { + return NewErrFieldKindNotFound(field.Name, underlying) + } + } + + return nil +} + +func validateSecondaryFieldsPairUp( + def client.CollectionDefinition, + defsByName map[string]client.CollectionDefinition, +) error { + for _, field := 
range def.Description.Fields { + if !field.Kind.HasValue() { + continue + } + + if !field.Kind.Value().IsObject() { + continue + } + + if !field.RelationName.HasValue() { + continue + } + + _, hasSchemaField := def.Schema.GetFieldByName(field.Name) + if hasSchemaField { + continue + } + + underlying := field.Kind.Value().Underlying() + otherDef, ok := defsByName[underlying] + if !ok { + continue + } + + if len(otherDef.Description.Fields) == 0 { + // Views/embedded objects do not require both sides of the relation to be defined. + continue + } + + otherField, ok := otherDef.Description.GetFieldByRelation( + field.RelationName.Value(), + def.GetName(), + field.Name, + ) + if !ok { + return NewErrRelationMissingField(underlying, field.RelationName.Value()) + } + + _, ok = otherDef.Schema.GetFieldByName(otherField.Name) + if !ok { + // This secondary is paired with another secondary, which is invalid + return NewErrRelationMissingField(underlying, field.RelationName.Value()) + } + } + + return nil +} + +func validateSingleSidePrimary( + def client.CollectionDefinition, + defsByName map[string]client.CollectionDefinition, +) error { + for _, field := range def.Description.Fields { + if !field.Kind.HasValue() { + continue + } + + if !field.Kind.Value().IsObject() { + continue + } + + if !field.RelationName.HasValue() { + continue + } + + _, hasSchemaField := def.Schema.GetFieldByName(field.Name) + if !hasSchemaField { + // This is a secondary field and thus passes this rule + continue + } + + underlying := field.Kind.Value().Underlying() + otherDef, ok := defsByName[underlying] + if !ok { + continue + } + + otherField, ok := otherDef.Description.GetFieldByRelation( + field.RelationName.Value(), + def.GetName(), + field.Name, + ) + if !ok { + // This must be a one-sided relation, in which case it passes this rule + continue + } + + _, ok = otherDef.Schema.GetFieldByName(otherField.Name) + if ok { + // This primary is paired with another primary, which is invalid + return ErrMultipleRelationPrimaries + } + } + + return nil +} + +func validateCollectionNameUnique( + oldColsByID map[uint32]client.CollectionDescription, + newColsByID map[uint32]client.CollectionDescription, +) error { + names := map[string]struct{}{} + for _, col := range newColsByID { + if !col.Name.HasValue() { + continue + } + + if _, ok := names[col.Name.Value()]; ok { + return NewErrCollectionAlreadyExists(col.Name.Value()) + } + names[col.Name.Value()] = struct{}{} + } + + return nil +} + +func validateSingleVersionActive( + oldColsByID map[uint32]client.CollectionDescription, + newColsByID map[uint32]client.CollectionDescription, +) error { + rootsWithActiveCol := map[uint32]struct{}{} + for _, col := range newColsByID { + if !col.Name.HasValue() { + continue + } + + if _, ok := rootsWithActiveCol[col.RootID]; ok { + return NewErrMultipleActiveCollectionVersions(col.Name.Value(), col.RootID) + } + rootsWithActiveCol[col.RootID] = struct{}{} + } + + return nil +} + +// validateSourcesNotRedefined specifies the limitations on how the collection sources +// can be mutated. +// +// Currently new sources cannot be added, existing cannot be removed, and CollectionSources +// cannot be redirected to other collections. 
+func validateSourcesNotRedefined( + oldColsByID map[uint32]client.CollectionDescription, + newColsByID map[uint32]client.CollectionDescription, +) error { + for _, newCol := range newColsByID { + oldCol, ok := oldColsByID[newCol.ID] + if !ok { + continue + } + + newColSources := newCol.CollectionSources() + oldColSources := oldCol.CollectionSources() + + if len(newColSources) != len(oldColSources) { + return NewErrCollectionSourcesCannotBeAddedRemoved(newCol.ID) + } + + for i := range newColSources { + if newColSources[i].SourceCollectionID != oldColSources[i].SourceCollectionID { + return NewErrCollectionSourceIDMutated( + newCol.ID, + newColSources[i].SourceCollectionID, + oldColSources[i].SourceCollectionID, + ) + } + } + + newQuerySources := newCol.QuerySources() + oldQuerySources := oldCol.QuerySources() + + if len(newQuerySources) != len(oldQuerySources) { + return NewErrCollectionSourcesCannotBeAddedRemoved(newCol.ID) + } + } + + return nil +} + +func validateIndexesNotModified( + oldColsByID map[uint32]client.CollectionDescription, + newColsByID map[uint32]client.CollectionDescription, +) error { + for _, newCol := range newColsByID { + oldCol, ok := oldColsByID[newCol.ID] + if !ok { + continue + } + + // DeepEqual is temporary, as this validation is temporary + if !reflect.DeepEqual(oldCol.Indexes, newCol.Indexes) { + return NewErrCollectionIndexesCannotBeMutated(newCol.ID) + } + } + + return nil +} + +func validateFieldsNotModified( + oldColsByID map[uint32]client.CollectionDescription, + newColsByID map[uint32]client.CollectionDescription, +) error { + for _, newCol := range newColsByID { + oldCol, ok := oldColsByID[newCol.ID] + if !ok { + continue + } + + // DeepEqual is temporary, as this validation is temporary + if !reflect.DeepEqual(oldCol.Fields, newCol.Fields) { + return NewErrCollectionFieldsCannotBeMutated(newCol.ID) + } + } + + return nil +} + +func validatePolicyNotModified( + oldColsByID map[uint32]client.CollectionDescription, + newColsByID map[uint32]client.CollectionDescription, +) error { + for _, newCol := range newColsByID { + oldCol, ok := oldColsByID[newCol.ID] + if !ok { + continue + } + + // DeepEqual is temporary, as this validation is temporary + if !reflect.DeepEqual(oldCol.Policy, newCol.Policy) { + return NewErrCollectionPolicyCannotBeMutated(newCol.ID) + } + } + + return nil +} + +func validateIDNotZero( + oldColsByID map[uint32]client.CollectionDescription, + newColsByID map[uint32]client.CollectionDescription, +) error { + for _, newCol := range newColsByID { + if newCol.ID == 0 { + return ErrCollectionIDCannotBeZero + } + } + + return nil +} + +func validateIDUnique( + oldColsByID map[uint32]client.CollectionDescription, + newColsByID map[uint32]client.CollectionDescription, +) error { + colIds := map[uint32]struct{}{} + for _, newCol := range newColsByID { + if _, ok := colIds[newCol.ID]; ok { + return NewErrCollectionIDAlreadyExists(newCol.ID) + } + colIds[newCol.ID] = struct{}{} + } + + return nil +} + +func validateIDExists( + oldColsByID map[uint32]client.CollectionDescription, + newColsByID map[uint32]client.CollectionDescription, +) error { + for _, newCol := range newColsByID { + if _, ok := oldColsByID[newCol.ID]; !ok { + return NewErrAddCollectionIDWithPatch(newCol.ID) + } + } + + return nil +} + +func validateRootIDNotMutated( + oldColsByID map[uint32]client.CollectionDescription, + newColsByID map[uint32]client.CollectionDescription, +) error { + for _, newCol := range newColsByID { + oldCol, ok := oldColsByID[newCol.ID] + if !ok { + 
continue + } + + if newCol.RootID != oldCol.RootID { + return NewErrCollectionRootIDCannotBeMutated(newCol.ID) + } + } + + return nil +} + +func validateSchemaVersionIDNotMutated( + oldColsByID map[uint32]client.CollectionDescription, + newColsByID map[uint32]client.CollectionDescription, +) error { + for _, newCol := range newColsByID { + oldCol, ok := oldColsByID[newCol.ID] + if !ok { + continue + } + + if newCol.SchemaVersionID != oldCol.SchemaVersionID { + return NewErrCollectionSchemaVersionIDCannotBeMutated(newCol.ID) + } + } + + return nil +} + +func validateCollectionNotRemoved( + oldColsByID map[uint32]client.CollectionDescription, + newColsByID map[uint32]client.CollectionDescription, +) error { +oldLoop: + for _, oldCol := range oldColsByID { + for _, newCol := range newColsByID { + // It is not enough to just match by the map index, in case the index does not pair + // up with the ID (this can happen if a user moves the collection within the map) + if newCol.ID == oldCol.ID { + continue oldLoop + } + } + + return NewErrCollectionsCannotBeDeleted(oldCol.ID) + } + + return nil +} + // SetActiveSchemaVersion activates all collection versions with the given schema version, and deactivates all // those without it (if they share the same schema root). // @@ -535,13 +1057,12 @@ func validateUpdateSchemaFields( // It will return an error if the provided schema version ID does not exist. func (db *db) setActiveSchemaVersion( ctx context.Context, - txn datastore.Txn, schemaVersionID string, ) error { if schemaVersionID == "" { return ErrSchemaVersionIDEmpty } - + txn := mustGetContextTxn(ctx) cols, err := description.GetCollectionsBySchemaVersionID(ctx, txn, schemaVersionID) if err != nil { return err @@ -585,11 +1106,11 @@ func (db *db) setActiveSchemaVersion( if len(sources) > 0 { // For now, we assume that each collection can only have a single source. This will likely need // to change later. - activeCol, rootCol, isActiveFound = db.getActiveCollectionDown(ctx, txn, colsByID, sources[0].SourceCollectionID) + activeCol, rootCol, isActiveFound = db.getActiveCollectionDown(ctx, colsByID, sources[0].SourceCollectionID) } if !isActiveFound { // We need to look both down and up for the active version - the most recent is not necessarily the active one. - activeCol, isActiveFound = db.getActiveCollectionUp(ctx, txn, colsBySourceID, rootCol.ID) + activeCol, isActiveFound = db.getActiveCollectionUp(ctx, colsBySourceID, rootCol.ID) } var newName string @@ -618,12 +1139,11 @@ func (db *db) setActiveSchemaVersion( } // Load the schema into the clients (e.g. GQL) - return db.loadSchema(ctx, txn) + return db.loadSchema(ctx) } func (db *db) getActiveCollectionDown( ctx context.Context, - txn datastore.Txn, colsByID map[uint32]client.CollectionDescription, id uint32, ) (client.CollectionDescription, client.CollectionDescription, bool) { @@ -646,12 +1166,11 @@ func (db *db) getActiveCollectionDown( // For now, we assume that each collection can only have a single source. This will likely need // to change later. 
- return db.getActiveCollectionDown(ctx, txn, colsByID, sources[0].SourceCollectionID) + return db.getActiveCollectionDown(ctx, colsByID, sources[0].SourceCollectionID) } func (db *db) getActiveCollectionUp( ctx context.Context, - txn datastore.Txn, colsBySourceID map[uint32][]client.CollectionDescription, id uint32, ) (client.CollectionDescription, bool) { @@ -665,7 +1184,7 @@ func (db *db) getActiveCollectionUp( if col.Name.HasValue() { return col, true } - activeCol, isFound := db.getActiveCollectionUp(ctx, txn, colsBySourceID, col.ID) + activeCol, isFound := db.getActiveCollectionUp(ctx, colsBySourceID, col.ID) if isFound { return activeCol, isFound } @@ -674,7 +1193,9 @@ func (db *db) getActiveCollectionUp( return client.CollectionDescription{}, false } -func (db *db) getCollectionByID(ctx context.Context, txn datastore.Txn, id uint32) (client.Collection, error) { +func (db *db) getCollectionByID(ctx context.Context, id uint32) (client.Collection, error) { + txn := mustGetContextTxn(ctx) + col, err := description.GetCollectionByID(ctx, txn, id) if err != nil { return nil, err @@ -686,7 +1207,8 @@ func (db *db) getCollectionByID(ctx context.Context, txn datastore.Txn, id uint3 } collection := db.newCollection(col, schema) - err = collection.loadIndexes(ctx, txn) + + err = collection.loadIndexes(ctx) if err != nil { return nil, err } @@ -695,12 +1217,12 @@ func (db *db) getCollectionByID(ctx context.Context, txn datastore.Txn, id uint3 } // getCollectionByName returns an existing collection within the database. -func (db *db) getCollectionByName(ctx context.Context, txn datastore.Txn, name string) (client.Collection, error) { +func (db *db) getCollectionByName(ctx context.Context, name string) (client.Collection, error) { if name == "" { return nil, ErrCollectionNameEmpty } - cols, err := db.getCollections(ctx, txn, client.CollectionFetchOptions{Name: immutable.Some(name)}) + cols, err := db.getCollections(ctx, client.CollectionFetchOptions{Name: immutable.Some(name)}) if err != nil { return nil, err } @@ -716,11 +1238,11 @@ func (db *db) getCollectionByName(ctx context.Context, txn datastore.Txn, name s // is provided. func (db *db) getCollections( ctx context.Context, - txn datastore.Txn, options client.CollectionFetchOptions, ) ([]client.Collection, error) { - var cols []client.CollectionDescription + txn := mustGetContextTxn(ctx) + var cols []client.CollectionDescription switch { case options.Name.HasValue(): col, err := description.GetCollectionByName(ctx, txn, options.Name.Value()) @@ -789,7 +1311,7 @@ func (db *db) getCollections( collection := db.newCollection(col, schema) collections = append(collections, collection) - err = collection.loadIndexes(ctx, txn) + err = collection.loadIndexes(ctx) if err != nil { return nil, err } @@ -799,7 +1321,9 @@ func (db *db) getCollections( } // getAllActiveDefinitions returns all queryable collection/views and any embedded schema used by them. 
-func (db *db) getAllActiveDefinitions(ctx context.Context, txn datastore.Txn) ([]client.CollectionDefinition, error) { +func (db *db) getAllActiveDefinitions(ctx context.Context) ([]client.CollectionDefinition, error) { + txn := mustGetContextTxn(ctx) + cols, err := description.GetActiveCollections(ctx, txn) if err != nil { return nil, err @@ -814,7 +1338,7 @@ func (db *db) getAllActiveDefinitions(ctx context.Context, txn datastore.Txn) ([ collection := db.newCollection(col, schema) - err = collection.loadIndexes(ctx, txn) + err = collection.loadIndexes(ctx) if err != nil { return nil, err } @@ -843,19 +1367,20 @@ func (db *db) getAllActiveDefinitions(ctx context.Context, txn datastore.Txn) ([ // // @todo: We probably need a lock on the collection for this kind of op since // it hits every key and will cause Tx conflicts for concurrent Txs -func (c *collection) GetAllDocIDs(ctx context.Context) (<-chan client.DocIDResult, error) { - txn, err := c.getTxn(ctx, true) +func (c *collection) GetAllDocIDs( + ctx context.Context, +) (<-chan client.DocIDResult, error) { + ctx, _, err := ensureContextTxn(ctx, c.db, true) if err != nil { return nil, err } - - return c.getAllDocIDsChan(ctx, txn) + return c.getAllDocIDsChan(ctx) } func (c *collection) getAllDocIDsChan( ctx context.Context, - txn datastore.Txn, ) (<-chan client.DocIDResult, error) { + txn := mustGetContextTxn(ctx) prefix := core.PrimaryDataStoreKey{ // empty path for all keys prefix CollectionRootID: c.Description().RootID, } @@ -871,10 +1396,10 @@ func (c *collection) getAllDocIDsChan( go func() { defer func() { if err := q.Close(); err != nil { - log.ErrorE(ctx, errFailedtoCloseQueryReqAllIDs, err) + log.ErrorContextE(ctx, errFailedtoCloseQueryReqAllIDs, err) } close(resCh) - c.discardImplicitTxn(ctx, txn) + txn.Discard(ctx) }() for res := range q.Next() { // check for Done on context first @@ -896,12 +1421,28 @@ func (c *collection) getAllDocIDsChan( docID, err := client.NewDocIDFromString(rawDocID) if err != nil { resCh <- client.DocIDResult{ - Err: res.Error, + Err: err, } return } - resCh <- client.DocIDResult{ - ID: docID, + + canRead, err := c.checkAccessOfDocWithACP( + ctx, + acp.ReadPermission, + docID.String(), + ) + + if err != nil { + resCh <- client.DocIDResult{ + Err: err, + } + return + } + + if canRead { + resCh <- client.DocIDResult{ + ID: docID, + } } } }() @@ -937,50 +1478,45 @@ func (c *collection) Definition() client.CollectionDefinition { return c.def } -// WithTxn returns a new instance of the collection, with a transaction -// handle instead of a raw DB handle. -func (c *collection) WithTxn(txn datastore.Txn) client.Collection { - return &collection{ - db: c.db, - txn: immutable.Some(txn), - def: c.def, - indexes: c.indexes, - fetcherFactory: c.fetcherFactory, - } -} - // Create a new document. // Will verify the DocID/CID to ensure that the new document is correctly formatted. -func (c *collection) Create(ctx context.Context, doc *client.Document) error { - txn, err := c.getTxn(ctx, false) +func (c *collection) Create( + ctx context.Context, + doc *client.Document, +) error { + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) - err = c.create(ctx, txn, doc) + err = c.create(ctx, doc) if err != nil { return err } - return c.commitImplicitTxn(ctx, txn) + + return txn.Commit(ctx) } // CreateMany creates a collection of documents at once. 
// Will verify the DocID/CID to ensure that the new documents are correctly formatted. -func (c *collection) CreateMany(ctx context.Context, docs []*client.Document) error { - txn, err := c.getTxn(ctx, false) +func (c *collection) CreateMany( + ctx context.Context, + docs []*client.Document, +) error { + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) for _, doc := range docs { - err = c.create(ctx, txn, doc) + err = c.create(ctx, doc) if err != nil { return err } } - return c.commitImplicitTxn(ctx, txn) + return txn.Commit(ctx) } func (c *collection) getDocIDAndPrimaryKeyFromDoc( @@ -999,14 +1535,17 @@ func (c *collection) getDocIDAndPrimaryKeyFromDoc( return docID, primaryKey, nil } -func (c *collection) create(ctx context.Context, txn datastore.Txn, doc *client.Document) error { +func (c *collection) create( + ctx context.Context, + doc *client.Document, +) error { docID, primaryKey, err := c.getDocIDAndPrimaryKeyFromDoc(doc) if err != nil { return err } // check if doc already exists - exists, isDeleted, err := c.exists(ctx, txn, primaryKey) + exists, isDeleted, err := c.exists(ctx, primaryKey) if err != nil { return err } @@ -1019,6 +1558,7 @@ func (c *collection) create(ctx context.Context, txn datastore.Txn, doc *client. // write value object marker if we have an empty doc if len(doc.Values()) == 0 { + txn := mustGetContextTxn(ctx) valueKey := c.getDataStoreKeyFromDocID(docID) err = txn.Datastore().Put(ctx, valueKey.ToDS(), []byte{base.ObjectMarker}) if err != nil { @@ -1027,42 +1567,50 @@ func (c *collection) create(ctx context.Context, txn datastore.Txn, doc *client. } // write data to DB via MerkleClock/CRDT - _, err = c.save(ctx, txn, doc, true) + _, err = c.save(ctx, doc, true) if err != nil { return err } - return c.indexNewDoc(ctx, txn, doc) + err = c.indexNewDoc(ctx, doc) + if err != nil { + return err + } + + return c.registerDocWithACP(ctx, doc.ID().String()) } // Update an existing document with the new values. // Any field that needs to be removed or cleared should call doc.Clear(field) before. // Any field that is nil/empty that hasn't called Clear will be ignored. -func (c *collection) Update(ctx context.Context, doc *client.Document) error { - txn, err := c.getTxn(ctx, false) +func (c *collection) Update( + ctx context.Context, + doc *client.Document, +) error { + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) primaryKey := c.getPrimaryKeyFromDocID(doc.ID()) - exists, isDeleted, err := c.exists(ctx, txn, primaryKey) + exists, isDeleted, err := c.exists(ctx, primaryKey) if err != nil { return err } if !exists { - return client.ErrDocumentNotFound + return client.ErrDocumentNotFoundOrNotAuthorized } if isDeleted { return NewErrDocumentDeleted(primaryKey.DocID) } - err = c.update(ctx, txn, doc) + err = c.update(ctx, doc) if err != nil { return err } - return c.commitImplicitTxn(ctx, txn) + return txn.Commit(ctx) } // Contract: DB Exists check is already performed, and a doc with the given ID exists. @@ -1070,8 +1618,24 @@ func (c *collection) Update(ctx context.Context, doc *client.Document) error { // or, just update everything regardless. // Should probably be smart about the update due to the MerkleCRDT overhead, shouldn't // add to the bloat. 
-func (c *collection) update(ctx context.Context, txn datastore.Txn, doc *client.Document) error { - _, err := c.save(ctx, txn, doc, false) +func (c *collection) update( + ctx context.Context, + doc *client.Document, +) error { + // Stop the update if the correct permissions aren't there. + canUpdate, err := c.checkAccessOfDocWithACP( + ctx, + acp.WritePermission, + doc.ID().String(), + ) + if err != nil { + return err + } + if !canUpdate { + return client.ErrDocumentNotFoundOrNotAuthorized + } + + _, err = c.save(ctx, doc, false) if err != nil { return err } @@ -1080,16 +1644,19 @@ func (c *collection) update(ctx context.Context, txn datastore.Txn, doc *client. // Save a document into the db. // Either by creating a new document or by updating an existing one -func (c *collection) Save(ctx context.Context, doc *client.Document) error { - txn, err := c.getTxn(ctx, false) +func (c *collection) Save( + ctx context.Context, + doc *client.Document, +) error { + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) // Check if document already exists with primary DS key. primaryKey := c.getPrimaryKeyFromDocID(doc.ID()) - exists, isDeleted, err := c.exists(ctx, txn, primaryKey) + exists, isDeleted, err := c.exists(ctx, primaryKey) if err != nil { return err } @@ -1099,29 +1666,33 @@ func (c *collection) Save(ctx context.Context, doc *client.Document) error { } if exists { - err = c.update(ctx, txn, doc) + err = c.update(ctx, doc) } else { - err = c.create(ctx, txn, doc) + err = c.create(ctx, doc) } if err != nil { return err } - return c.commitImplicitTxn(ctx, txn) + return txn.Commit(ctx) } +// save saves the document state. save MUST not be called outside the `c.create` +// and `c.update` methods as we wrap the acp logic within those methods. Calling +// save elsewhere could cause the omission of acp checks. func (c *collection) save( ctx context.Context, - txn datastore.Txn, doc *client.Document, isCreate bool, ) (cid.Cid, error) { if !isCreate { - err := c.updateIndexedDoc(ctx, txn, doc) + err := c.updateIndexedDoc(ctx, doc) if err != nil { return cid.Undef, err } } + txn := mustGetContextTxn(ctx) + // NOTE: We delay the final Clean() call until we know // the commit on the transaction is successful. 
If we didn't // wait, and just did it here, then *if* the commit fails down @@ -1164,7 +1735,13 @@ func (c *collection) save( if isSecondaryRelationID { primaryId := val.Value().(string) - err = c.patchPrimaryDoc(ctx, txn, c.Name().Value(), relationFieldDescription, primaryKey.DocID, primaryId) + err = c.patchPrimaryDoc( + ctx, + c.Name().Value(), + relationFieldDescription, + primaryKey.DocID, + primaryId, + ) if err != nil { return cid.Undef, err } @@ -1174,7 +1751,12 @@ continue } - err = c.validateOneToOneLinkDoesntAlreadyExist(ctx, txn, doc.ID().String(), fieldDescription, val.Value()) + err = c.validateOneToOneLinkDoesntAlreadyExist( + ctx, + doc.ID().String(), + fieldDescription, + val.Value(), + ) if err != nil { return cid.Undef, err } @@ -1206,7 +1788,6 @@ headNode, priority, err := c.saveCompositeToMerkleCRDT( ctx, - txn, primaryKey.ToDataStoreKey(), links, client.Active, @@ -1240,7 +1821,6 @@ func (c *collection) validateOneToOneLinkDoesntAlreadyExist( ctx context.Context, - txn datastore.Txn, docID string, fieldDescription client.FieldDefinition, value any, @@ -1259,22 +1839,22 @@ if !ok { return client.NewErrFieldNotExist(strings.TrimSuffix(fieldDescription.Name, request.RelatedObjectID)) } - if objFieldDescription.Kind != client.FieldKind_FOREIGN_OBJECT { + if !(objFieldDescription.Kind.IsObject() && !objFieldDescription.Kind.IsArray()) { return nil } - otherCol, err := c.db.getCollectionByName(ctx, txn, objFieldDescription.Schema) + otherCol, err := c.db.getCollectionByName(ctx, objFieldDescription.Kind.Underlying()) if err != nil { return err } - otherSchema := otherCol.Schema() otherObjFieldDescription, _ := otherCol.Description().GetFieldByRelation( fieldDescription.RelationName, c.Name().Value(), objFieldDescription.Name, - &otherSchema, ) - if otherObjFieldDescription.Kind != client.FieldKind_FOREIGN_OBJECT { + if !(otherObjFieldDescription.Kind.HasValue() && + otherObjFieldDescription.Kind.Value().IsObject() && + !otherObjFieldDescription.Kind.Value().IsArray()) { // If the other field is not an object field then this is not a one to one relation and we can continue return nil } @@ -1286,7 +1866,7 @@ fieldDescription.Name, value, ) - selectionPlan, err := c.makeSelectionPlan(ctx, txn, filter) + selectionPlan, err := c.makeSelectionPlan(ctx, filter) if err != nil { return err } @@ -1338,54 +1918,61 @@ // otherwise will return false, along with an error, if it cannot. // If the document doesn't exist, then it will return false, and an ErrDocumentNotFound error. // This operation will delete all state relating to the given DocID. This includes data, block, and head storage.
-func (c *collection) Delete(ctx context.Context, docID client.DocID) (bool, error) { - txn, err := c.getTxn(ctx, false) +func (c *collection) Delete( + ctx context.Context, + docID client.DocID, +) (bool, error) { + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return false, err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) primaryKey := c.getPrimaryKeyFromDocID(docID) - exists, isDeleted, err := c.exists(ctx, txn, primaryKey) - if err != nil { - return false, err - } - if !exists || isDeleted { - return false, client.ErrDocumentNotFound - } - if isDeleted { - return false, NewErrDocumentDeleted(primaryKey.DocID) - } - err = c.applyDelete(ctx, txn, primaryKey) + err = c.applyDelete(ctx, primaryKey) if err != nil { return false, err } - return true, c.commitImplicitTxn(ctx, txn) + return true, txn.Commit(ctx) } // Exists checks if a given document exists with supplied DocID. -func (c *collection) Exists(ctx context.Context, docID client.DocID) (bool, error) { - txn, err := c.getTxn(ctx, false) +func (c *collection) Exists( + ctx context.Context, + docID client.DocID, +) (bool, error) { + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return false, err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) primaryKey := c.getPrimaryKeyFromDocID(docID) - exists, isDeleted, err := c.exists(ctx, txn, primaryKey) + exists, isDeleted, err := c.exists(ctx, primaryKey) if err != nil && !errors.Is(err, ds.ErrNotFound) { return false, err } - return exists && !isDeleted, c.commitImplicitTxn(ctx, txn) + return exists && !isDeleted, txn.Commit(ctx) } // check if a document exists with the given primary key func (c *collection) exists( ctx context.Context, - txn datastore.Txn, primaryKey core.PrimaryDataStoreKey, ) (exists bool, isDeleted bool, err error) { + canRead, err := c.checkAccessOfDocWithACP( + ctx, + acp.ReadPermission, + primaryKey.DocID, + ) + if err != nil { + return false, false, err + } else if !canRead { + return false, false, nil + } + + txn := mustGetContextTxn(ctx) val, err := txn.Datastore().Get(ctx, primaryKey.ToDS()) if err != nil && errors.Is(err, ds.ErrNotFound) { return false, false, nil @@ -1399,13 +1986,17 @@ func (c *collection) exists( return true, false, nil } +// saveCompositeToMerkleCRDT saves the composite to the merkle CRDT. +// saveCompositeToMerkleCRDT MUST not be called outside the `c.save` +// and `c.applyDelete` methods as we wrap the acp logic around those methods. +// Calling it elsewhere could cause the omission of acp checks. func (c *collection) saveCompositeToMerkleCRDT( ctx context.Context, - txn datastore.Txn, dsKey core.DataStoreKey, links []core.DAGLink, status client.DocumentStatus, ) (ipld.Node, uint64, error) { + txn := mustGetContextTxn(ctx) dsKey = dsKey.WithFieldId(core.COMPOSITE_NAMESPACE) merkleCRDT := merklecrdt.NewMerkleCompositeDAG( txn, @@ -1421,35 +2012,6 @@ func (c *collection) saveCompositeToMerkleCRDT( return merkleCRDT.Save(ctx, links) } -// getTxn gets or creates a new transaction from the underlying db. -// If the collection already has a txn, return the existing one. -// Otherwise, create a new implicit transaction. -func (c *collection) getTxn(ctx context.Context, readonly bool) (datastore.Txn, error) { - if c.txn.HasValue() { - return c.txn.Value(), nil - } - return c.db.NewTxn(ctx, readonly) -} - -// discardImplicitTxn is a proxy function used by the collection to execute the Discard() -// transaction function only if its an implicit transaction. 
-// -// Implicit transactions are transactions that are created *during* an operation execution as a side effect. -// -// Explicit transactions are provided to the collection object via the "WithTxn(...)" function. -func (c *collection) discardImplicitTxn(ctx context.Context, txn datastore.Txn) { - if !c.txn.HasValue() { - txn.Discard(ctx) - } -} - -func (c *collection) commitImplicitTxn(ctx context.Context, txn datastore.Txn) error { - if !c.txn.HasValue() { - return txn.Commit(ctx) - } - return nil -} - func (c *collection) getPrimaryKeyFromDocID(docID client.DocID) core.PrimaryDataStoreKey { return core.PrimaryDataStoreKey{ CollectionRootID: c.Description().RootID, @@ -1466,7 +2028,7 @@ func (c *collection) getDataStoreKeyFromDocID(docID client.DocID) core.DataStore } func (c *collection) tryGetFieldKey(primaryKey core.PrimaryDataStoreKey, fieldName string) (core.DataStoreKey, bool) { - fieldId, hasField := c.tryGetSchemaFieldID(fieldName) + fieldId, hasField := c.tryGetFieldID(fieldName) if !hasField { return core.DataStoreKey{}, false } @@ -1478,9 +2040,9 @@ func (c *collection) tryGetFieldKey(primaryKey core.PrimaryDataStoreKey, fieldNa }, true } -// tryGetSchemaFieldID returns the FieldID of the given fieldName. +// tryGetFieldID returns the FieldID of the given fieldName. // Will return false if the field is not found. -func (c *collection) tryGetSchemaFieldID(fieldName string) (uint32, bool) { +func (c *collection) tryGetFieldID(fieldName string) (uint32, bool) { for _, field := range c.Definition().GetFields() { if field.Name == fieldName { if field.Kind.IsObject() || field.Kind.IsObjectArray() { diff --git a/db/collection_acp.go b/db/collection_acp.go new file mode 100644 index 0000000000..4a273e907e --- /dev/null +++ b/db/collection_acp.go @@ -0,0 +1,67 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package db + +import ( + "context" + + "github.com/sourcenetwork/defradb/acp" + "github.com/sourcenetwork/defradb/db/permission" +) + +// registerDocWithACP handles the registration of the document with acp. +// The registering is done at document creation on the collection. +// +// According to our access logic we have these components to worry about: +// (1) the request is permissioned (has an identity signature), +// (2) the collection is permissioned (has a policy), +// (3) acp is available (acp is enabled). +// +// The document is only registered if all (1) (2) and (3) are true. +// +// Otherwise, nothing is registered with the acp system. +func (c *collection) registerDocWithACP( + ctx context.Context, + docID string, +) error { + // If acp is not available, then no document is registered. + if !c.db.acp.HasValue() { + return nil + } + identity := GetContextIdentity(ctx) + return permission.RegisterDocOnCollectionWithACP( + ctx, + identity, + c.db.acp.Value(), + c, + docID, + ) +} + +func (c *collection) checkAccessOfDocWithACP( + ctx context.Context, + dpiPermission acp.DPIPermission, + docID string, +) (bool, error) { + // If acp is not available, then we have unrestricted access. 
+ if !c.db.acp.HasValue() { + return true, nil + } + identity := GetContextIdentity(ctx) + return permission.CheckAccessOfDocOnCollectionWithACP( + ctx, + identity, + c.db.acp.Value(), + c, + dpiPermission, + docID, + ) +} diff --git a/db/collection_delete.go b/db/collection_delete.go index 785b2830d7..62ebd7f167 100644 --- a/db/collection_delete.go +++ b/db/collection_delete.go @@ -13,156 +13,39 @@ package db import ( "context" + "github.com/sourcenetwork/defradb/acp" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/client/request" "github.com/sourcenetwork/defradb/core" - "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/events" "github.com/sourcenetwork/defradb/merkle/clock" ) -// DeleteWith deletes a target document. -// -// Target can be a Filter statement, a single DocID, a single document, -// an array of DocIDs, or an array of documents. -// -// If you want more type safety, use the respective typed versions of Delete. -// Eg: DeleteWithFilter or DeleteWithDocID -func (c *collection) DeleteWith( - ctx context.Context, - target any, -) (*client.DeleteResult, error) { - switch t := target.(type) { - case string, map[string]any, *request.Filter: - return c.DeleteWithFilter(ctx, t) - case client.DocID: - return c.DeleteWithDocID(ctx, t) - case []client.DocID: - return c.DeleteWithDocIDs(ctx, t) - default: - return nil, client.ErrInvalidDeleteTarget - } -} - -// DeleteWithDocID deletes using a DocID to target a single document for delete. -func (c *collection) DeleteWithDocID( - ctx context.Context, - docID client.DocID, -) (*client.DeleteResult, error) { - txn, err := c.getTxn(ctx, false) - if err != nil { - return nil, err - } - - defer c.discardImplicitTxn(ctx, txn) - - dsKey := c.getPrimaryKeyFromDocID(docID) - res, err := c.deleteWithKey(ctx, txn, dsKey) - if err != nil { - return nil, err - } - - return res, c.commitImplicitTxn(ctx, txn) -} - -// DeleteWithDocIDs is the same as DeleteWithDocID but accepts multiple DocIDs as a slice. -func (c *collection) DeleteWithDocIDs( - ctx context.Context, - docIDs []client.DocID, -) (*client.DeleteResult, error) { - txn, err := c.getTxn(ctx, false) - if err != nil { - return nil, err - } - - defer c.discardImplicitTxn(ctx, txn) - - res, err := c.deleteWithIDs(ctx, txn, docIDs, client.Deleted) - if err != nil { - return nil, err - } - - return res, c.commitImplicitTxn(ctx, txn) -} - // DeleteWithFilter deletes using a filter to target documents for delete. func (c *collection) DeleteWithFilter( ctx context.Context, filter any, ) (*client.DeleteResult, error) { - txn, err := c.getTxn(ctx, false) - if err != nil { - return nil, err - } - - defer c.discardImplicitTxn(ctx, txn) - - res, err := c.deleteWithFilter(ctx, txn, filter, client.Deleted) + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return nil, err } + defer txn.Discard(ctx) - return res, c.commitImplicitTxn(ctx, txn) -} - -func (c *collection) deleteWithKey( - ctx context.Context, - txn datastore.Txn, - key core.PrimaryDataStoreKey, -) (*client.DeleteResult, error) { - // Check the key we have been given to delete with actually has a corresponding - // document (i.e. document actually exists in the collection). - err := c.applyDelete(ctx, txn, key) + res, err := c.deleteWithFilter(ctx, filter, client.Deleted) if err != nil { return nil, err } - // Upon successfull deletion, record a summary. 
- results := &client.DeleteResult{ - Count: 1, - DocIDs: []string{key.DocID}, - } - - return results, nil -} - -func (c *collection) deleteWithIDs( - ctx context.Context, - txn datastore.Txn, - docIDs []client.DocID, - _ client.DocumentStatus, -) (*client.DeleteResult, error) { - results := &client.DeleteResult{ - DocIDs: make([]string, 0), - } - - for _, docID := range docIDs { - primaryKey := c.getPrimaryKeyFromDocID(docID) - - // Apply the function that will perform the full deletion of this document. - err := c.applyDelete(ctx, txn, primaryKey) - if err != nil { - return nil, err - } - - // Add this deleted docID to our list. - results.DocIDs = append(results.DocIDs, docID.String()) - } - - // Upon successfull deletion, record a summary of how many we deleted. - results.Count = int64(len(results.DocIDs)) - - return results, nil + return res, txn.Commit(ctx) } func (c *collection) deleteWithFilter( ctx context.Context, - txn datastore.Txn, filter any, _ client.DocumentStatus, ) (*client.DeleteResult, error) { // Make a selection plan that will scan through only the documents with matching filter. - selectionPlan, err := c.makeSelectionPlan(ctx, txn, filter) + selectionPlan, err := c.makeSelectionPlan(ctx, filter) if err != nil { return nil, err } @@ -179,7 +62,7 @@ func (c *collection) deleteWithFilter( // If the plan isn't properly closed at any exit point log the error. defer func() { if err := selectionPlan.Close(); err != nil { - log.ErrorE(ctx, "Failed to close the request plan, after filter delete", err) + log.ErrorContextE(ctx, "Failed to close the request plan, after filter delete", err) } }() @@ -210,7 +93,7 @@ func (c *collection) deleteWithFilter( } // Delete the document that is associated with this DS key we got from the filter. - err = c.applyDelete(ctx, txn, primaryKey) + err = c.applyDelete(ctx, primaryKey) if err != nil { return nil, err } @@ -226,22 +109,36 @@ func (c *collection) applyDelete( ctx context.Context, - txn datastore.Txn, primaryKey core.PrimaryDataStoreKey, ) error { - found, isDeleted, err := c.exists(ctx, txn, primaryKey) + // Must also have read permission to delete, in order to check if document exists. + found, isDeleted, err := c.exists(ctx, primaryKey) if err != nil { return err } if !found { - return client.ErrDocumentNotFound + return client.ErrDocumentNotFoundOrNotAuthorized } if isDeleted { return NewErrDocumentDeleted(primaryKey.DocID) } - dsKey := primaryKey.ToDataStoreKey() + // Stop deletion of document if the correct permissions aren't there.
+ canDelete, err := c.checkAccessOfDocWithACP( + ctx, + acp.WritePermission, + primaryKey.DocID, + ) + if err != nil { + return err + } + if !canDelete { + return client.ErrDocumentNotFoundOrNotAuthorized + } + + txn := mustGetContextTxn(ctx) + dsKey := primaryKey.ToDataStoreKey() headset := clock.NewHeadSet( txn.Headstore(), dsKey.WithFieldId(core.COMPOSITE_NAMESPACE).ToHeadStoreKey(), @@ -261,7 +158,6 @@ func (c *collection) applyDelete( headNode, priority, err := c.saveCompositeToMerkleCRDT( ctx, - txn, dsKey, dagLinks, client.Deleted, diff --git a/db/collection_get.go b/db/collection_get.go index cf245fc678..75d3d2826b 100644 --- a/db/collection_get.go +++ b/db/collection_get.go @@ -15,46 +15,55 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" - "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/db/base" "github.com/sourcenetwork/defradb/db/fetcher" ) -func (c *collection) Get(ctx context.Context, docID client.DocID, showDeleted bool) (*client.Document, error) { +func (c *collection) Get( + ctx context.Context, + docID client.DocID, + showDeleted bool, +) (*client.Document, error) { // create txn - txn, err := c.getTxn(ctx, true) + ctx, txn, err := ensureContextTxn(ctx, c.db, true) if err != nil { return nil, err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) primaryKey := c.getPrimaryKeyFromDocID(docID) - found, isDeleted, err := c.exists(ctx, txn, primaryKey) + found, isDeleted, err := c.exists(ctx, primaryKey) if err != nil { return nil, err } if !found || (isDeleted && !showDeleted) { - return nil, client.ErrDocumentNotFound + return nil, client.ErrDocumentNotFoundOrNotAuthorized } - doc, err := c.get(ctx, txn, primaryKey, nil, showDeleted) + doc, err := c.get(ctx, primaryKey, nil, showDeleted) if err != nil { return nil, err } - return doc, c.commitImplicitTxn(ctx, txn) + + if doc == nil { + return nil, client.ErrDocumentNotFoundOrNotAuthorized + } + + return doc, txn.Commit(ctx) } func (c *collection) get( ctx context.Context, - txn datastore.Txn, primaryKey core.PrimaryDataStoreKey, fields []client.FieldDefinition, showDeleted bool, ) (*client.Document, error) { + txn := mustGetContextTxn(ctx) + identity := GetContextIdentity(ctx) // create a new document fetcher df := c.newFetcher() // initialize it with the primary index - err := df.Init(ctx, txn, c, fields, nil, nil, false, showDeleted) + err := df.Init(ctx, identity, txn, c.db.acp, c, fields, nil, nil, false, showDeleted) if err != nil { _ = df.Close() return nil, err @@ -85,7 +94,7 @@ func (c *collection) get( return nil, nil } - doc, err := fetcher.Decode(encodedDoc, c.Schema()) + doc, err := fetcher.Decode(encodedDoc, c.Definition()) if err != nil { return nil, err } diff --git a/db/collection_index.go b/db/collection_index.go index 7fb036498a..2327ae027a 100644 --- a/db/collection_index.go +++ b/db/collection_index.go @@ -32,36 +32,33 @@ import ( // createCollectionIndex creates a new collection index and saves it to the database in its system store. 
func (db *db) createCollectionIndex( ctx context.Context, - txn datastore.Txn, collectionName string, desc client.IndexDescription, ) (client.IndexDescription, error) { - col, err := db.getCollectionByName(ctx, txn, collectionName) + col, err := db.getCollectionByName(ctx, collectionName) if err != nil { return client.IndexDescription{}, NewErrCanNotReadCollection(collectionName, err) } - col = col.WithTxn(txn) return col.CreateIndex(ctx, desc) } func (db *db) dropCollectionIndex( ctx context.Context, - txn datastore.Txn, collectionName, indexName string, ) error { - col, err := db.getCollectionByName(ctx, txn, collectionName) + col, err := db.getCollectionByName(ctx, collectionName) if err != nil { return NewErrCanNotReadCollection(collectionName, err) } - col = col.WithTxn(txn) return col.DropIndex(ctx, indexName) } // getAllIndexDescriptions returns all the index descriptions in the database. func (db *db) getAllIndexDescriptions( ctx context.Context, - txn datastore.Txn, ) (map[client.CollectionName][]client.IndexDescription, error) { + // callers of this function must set a context transaction + txn := mustGetContextTxn(ctx) prefix := core.NewCollectionIndexKey(immutable.None[uint32](), "") keys, indexDescriptions, err := datastore.DeserializePrefix[client.IndexDescription](ctx, @@ -95,12 +92,16 @@ func (db *db) getAllIndexDescriptions( func (db *db) fetchCollectionIndexDescriptions( ctx context.Context, - txn datastore.Txn, colID uint32, ) ([]client.IndexDescription, error) { + // callers of this function must set a context transaction + txn := mustGetContextTxn(ctx) prefix := core.NewCollectionIndexKey(immutable.Some(colID), "") - _, indexDescriptions, err := datastore.DeserializePrefix[client.IndexDescription](ctx, - prefix.ToString(), txn.Systemstore()) + _, indexDescriptions, err := datastore.DeserializePrefix[client.IndexDescription]( + ctx, + prefix.ToString(), + txn.Systemstore(), + ) if err != nil { return nil, err } @@ -108,59 +109,61 @@ func (db *db) fetchCollectionIndexDescriptions( } func (c *collection) CreateDocIndex(ctx context.Context, doc *client.Document) error { - txn, err := c.getTxn(ctx, false) + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) - err = c.indexNewDoc(ctx, txn, doc) + err = c.indexNewDoc(ctx, doc) if err != nil { return err } - return c.commitImplicitTxn(ctx, txn) + return txn.Commit(ctx) } func (c *collection) UpdateDocIndex(ctx context.Context, oldDoc, newDoc *client.Document) error { - txn, err := c.getTxn(ctx, false) + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) - err = c.deleteIndexedDoc(ctx, txn, oldDoc) + err = c.deleteIndexedDoc(ctx, oldDoc) if err != nil { return err } - err = c.indexNewDoc(ctx, txn, newDoc) + err = c.indexNewDoc(ctx, newDoc) if err != nil { return err } - return c.commitImplicitTxn(ctx, txn) + return txn.Commit(ctx) } func (c *collection) DeleteDocIndex(ctx context.Context, doc *client.Document) error { - txn, err := c.getTxn(ctx, false) + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) - err = c.deleteIndexedDoc(ctx, txn, doc) + err = c.deleteIndexedDoc(ctx, doc) if err != nil { return err } - return c.commitImplicitTxn(ctx, txn) + return txn.Commit(ctx) } -func (c *collection) indexNewDoc(ctx context.Context, txn datastore.Txn, doc 
*client.Document) error { - err := c.loadIndexes(ctx, txn) +func (c *collection) indexNewDoc(ctx context.Context, doc *client.Document) error { + err := c.loadIndexes(ctx) if err != nil { return err } + // callers of this function must set a context transaction + txn := mustGetContextTxn(ctx) for _, index := range c.indexes { err = index.Save(ctx, txn, doc) if err != nil { @@ -172,16 +175,16 @@ func (c *collection) indexNewDoc(ctx context.Context, txn datastore.Txn, doc *cl func (c *collection) updateIndexedDoc( ctx context.Context, - txn datastore.Txn, doc *client.Document, ) error { - err := c.loadIndexes(ctx, txn) + err := c.loadIndexes(ctx) if err != nil { return err } + // TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2365 - ACP <> Indexing, possibly also check + // and handle the case of when oldDoc == nil (will be nil if inaccessible document). oldDoc, err := c.get( ctx, - txn, c.getPrimaryKeyFromDocID(doc.ID()), c.Definition().CollectIndexedFields(), false, @@ -189,6 +192,7 @@ func (c *collection) updateIndexedDoc( if err != nil { return err } + txn := mustGetContextTxn(ctx) for _, index := range c.indexes { err = index.Update(ctx, txn, oldDoc, doc) if err != nil { @@ -200,13 +204,13 @@ func (c *collection) updateIndexedDoc( func (c *collection) deleteIndexedDoc( ctx context.Context, - txn datastore.Txn, doc *client.Document, ) error { - err := c.loadIndexes(ctx, txn) + err := c.loadIndexes(ctx) if err != nil { return err } + txn := mustGetContextTxn(ctx) for _, index := range c.indexes { err = index.Delete(ctx, txn, doc) if err != nil { @@ -235,24 +239,29 @@ func (c *collection) CreateIndex( ctx context.Context, desc client.IndexDescription, ) (client.IndexDescription, error) { - txn, err := c.getTxn(ctx, false) + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return client.IndexDescription{}, err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) - index, err := c.createIndex(ctx, txn, desc) + index, err := c.createIndex(ctx, desc) if err != nil { return client.IndexDescription{}, err } - return index.Description(), c.commitImplicitTxn(ctx, txn) + return index.Description(), txn.Commit(ctx) } func (c *collection) createIndex( ctx context.Context, - txn datastore.Txn, desc client.IndexDescription, ) (CollectionIndex, error) { + // Don't allow creating index on a permissioned collection, until following is implemented. 
+ // TODO-ACP: ACP <> INDEX https://github.com/sourcenetwork/defradb/issues/2365 + if c.Description().Policy.HasValue() { + return nil, ErrCanNotCreateIndexOnCollectionWithPolicy + } + if desc.Name != "" && !schema.IsValidIndexName(desc.Name) { return nil, schema.NewErrIndexWithInvalidName("!") } @@ -266,20 +275,19 @@ return nil, err } - indexKey, err := c.generateIndexNameIfNeededAndCreateKey(ctx, txn, &desc) + indexKey, err := c.generateIndexNameIfNeededAndCreateKey(ctx, &desc) if err != nil { return nil, err } colSeq, err := c.db.getSequence( ctx, - txn, core.NewIndexIDSequenceKey(c.ID()), ) if err != nil { return nil, err } - colID, err := colSeq.next(ctx, txn) + colID, err := colSeq.next(ctx) if err != nil { return nil, err } @@ -290,6 +298,7 @@ return nil, err } + txn := mustGetContextTxn(ctx) err = txn.Systemstore().Put(ctx, indexKey.ToDS(), buf) if err != nil { return nil, err } @@ -300,7 +309,7 @@ } c.def.Description.Indexes = append(c.def.Description.Indexes, colIndex.Description()) c.indexes = append(c.indexes, colIndex) - err = c.indexExistingDocs(ctx, txn, colIndex) + err = c.indexExistingDocs(ctx, colIndex) if err != nil { removeErr := colIndex.RemoveAll(ctx, txn) return nil, errors.Join(err, removeErr) } @@ -310,12 +319,25 @@ func (c *collection) iterateAllDocs( ctx context.Context, - txn datastore.Txn, fields []client.FieldDefinition, exec func(doc *client.Document) error, ) error { + txn := mustGetContextTxn(ctx) + identity := GetContextIdentity(ctx) + df := c.newFetcher() - err := df.Init(ctx, txn, c, fields, nil, nil, false, false) + err := df.Init( + ctx, + identity, + txn, + c.db.acp, + c, + fields, + nil, + nil, + false, + false, + ) if err != nil { return errors.Join(err, df.Close()) } @@ -336,7 +358,7 @@ break } - doc, err := fetcher.Decode(encodedDoc, c.Schema()) + doc, err := fetcher.Decode(encodedDoc, c.Definition()) if err != nil { return errors.Join(err, df.Close()) } @@ -352,7 +374,6 @@ func (c *collection) indexExistingDocs( ctx context.Context, - txn datastore.Txn, index CollectionIndex, ) error { fields := make([]client.FieldDefinition, 0, 1) @@ -362,8 +383,8 @@ fields = append(fields, colField) } } - - return c.iterateAllDocs(ctx, txn, fields, func(doc *client.Document) error { + txn := mustGetContextTxn(ctx) + return c.iterateAllDocs(ctx, fields, func(doc *client.Document) error { return index.Save(ctx, txn, doc) }) } @@ -374,24 +395,25 @@ // // All index artifacts for existing documents related to the index will be removed.
func (c *collection) DropIndex(ctx context.Context, indexName string) error { - txn, err := c.getTxn(ctx, false) + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) - err = c.dropIndex(ctx, txn, indexName) + err = c.dropIndex(ctx, indexName) if err != nil { return err } - return c.commitImplicitTxn(ctx, txn) + return txn.Commit(ctx) } -func (c *collection) dropIndex(ctx context.Context, txn datastore.Txn, indexName string) error { - err := c.loadIndexes(ctx, txn) +func (c *collection) dropIndex(ctx context.Context, indexName string) error { + err := c.loadIndexes(ctx) if err != nil { return err } + txn := mustGetContextTxn(ctx) var didFind bool for i := range c.indexes { @@ -424,7 +446,9 @@ func (c *collection) dropIndex(ctx context.Context, txn datastore.Txn, indexName return nil } -func (c *collection) dropAllIndexes(ctx context.Context, txn datastore.Txn) error { +func (c *collection) dropAllIndexes(ctx context.Context) error { + // callers of this function must set a context transaction + txn := mustGetContextTxn(ctx) prefix := core.NewCollectionIndexKey(immutable.Some(c.ID()), "") keys, err := datastore.FetchKeysForPrefix(ctx, prefix.ToString(), txn.Systemstore()) @@ -442,8 +466,8 @@ func (c *collection) dropAllIndexes(ctx context.Context, txn datastore.Txn) erro return err } -func (c *collection) loadIndexes(ctx context.Context, txn datastore.Txn) error { - indexDescriptions, err := c.db.fetchCollectionIndexDescriptions(ctx, txn, c.ID()) +func (c *collection) loadIndexes(ctx context.Context) error { + indexDescriptions, err := c.db.fetchCollectionIndexDescriptions(ctx, c.ID()) if err != nil { return err } @@ -462,13 +486,13 @@ func (c *collection) loadIndexes(ctx context.Context, txn datastore.Txn) error { // GetIndexes returns all indexes for the collection. func (c *collection) GetIndexes(ctx context.Context) ([]client.IndexDescription, error) { - txn, err := c.getTxn(ctx, false) + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return nil, err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) - err = c.loadIndexes(ctx, txn) + err = c.loadIndexes(ctx) if err != nil { return nil, err } @@ -496,9 +520,11 @@ func (c *collection) checkExistingFields( func (c *collection) generateIndexNameIfNeededAndCreateKey( ctx context.Context, - txn datastore.Txn, desc *client.IndexDescription, ) (core.CollectionIndexKey, error) { + // callers of this function must set a context transaction + txn := mustGetContextTxn(ctx) + var indexKey core.CollectionIndexKey if desc.Name == "" { nameIncrement := 1 diff --git a/db/collection_update.go b/db/collection_update.go index fc985d2c41..e59469715a 100644 --- a/db/collection_update.go +++ b/db/collection_update.go @@ -20,33 +20,10 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" - "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/planner" ) -// UpdateWith updates a target document using the given updater type. Target -// can be a Filter statement, a single DocID, a single document, -// an array of DocIDs, or an array of documents. -// If you want more type safety, use the respective typed versions of Update. 
-// Eg: UpdateWithFilter or UpdateWithDocID -func (c *collection) UpdateWith( - ctx context.Context, - target any, - updater string, -) (*client.UpdateResult, error) { - switch t := target.(type) { - case string, map[string]any, *request.Filter: - return c.UpdateWithFilter(ctx, t, updater) - case client.DocID: - return c.UpdateWithDocID(ctx, t, updater) - case []client.DocID: - return c.UpdateWithDocIDs(ctx, t, updater) - default: - return nil, client.ErrInvalidUpdateTarget - } -} - // UpdateWithFilter updates using a filter to target documents for update. // An updater value is provided, which could be a string Patch, string Merge Patch // or a parsed Patch, or parsed Merge Patch. @@ -55,154 +32,21 @@ func (c *collection) UpdateWithFilter( filter any, updater string, ) (*client.UpdateResult, error) { - txn, err := c.getTxn(ctx, false) - if err != nil { - return nil, err - } - defer c.discardImplicitTxn(ctx, txn) - res, err := c.updateWithFilter(ctx, txn, filter, updater) - if err != nil { - return nil, err - } - return res, c.commitImplicitTxn(ctx, txn) -} - -// UpdateWithDocID updates using a DocID to target a single document for update. -// An updater value is provided, which could be a string Patch, string Merge Patch -// or a parsed Patch, or parsed Merge Patch. -func (c *collection) UpdateWithDocID( - ctx context.Context, - docID client.DocID, - updater string, -) (*client.UpdateResult, error) { - txn, err := c.getTxn(ctx, false) - if err != nil { - return nil, err - } - defer c.discardImplicitTxn(ctx, txn) - res, err := c.updateWithDocID(ctx, txn, docID, updater) - if err != nil { - return nil, err - } - - return res, c.commitImplicitTxn(ctx, txn) -} - -// UpdateWithDocIDs is the same as UpdateWithDocID but accepts multiple DocIDs as a slice. -// An updater value is provided, which could be a string Patch, string Merge Patch -// or a parsed Patch, or parsed Merge Patch. 
-func (c *collection) UpdateWithDocIDs( - ctx context.Context, - docIDs []client.DocID, - updater string, -) (*client.UpdateResult, error) { - txn, err := c.getTxn(ctx, false) - if err != nil { - return nil, err - } - defer c.discardImplicitTxn(ctx, txn) - res, err := c.updateWithIDs(ctx, txn, docIDs, updater) - if err != nil { - return nil, err - } - - return res, c.commitImplicitTxn(ctx, txn) -} - -func (c *collection) updateWithDocID( - ctx context.Context, - txn datastore.Txn, - docID client.DocID, - updater string, -) (*client.UpdateResult, error) { - parsedUpdater, err := fastjson.Parse(updater) - if err != nil { - return nil, err - } - - isPatch := false - if parsedUpdater.Type() == fastjson.TypeArray { - isPatch = true - } else if parsedUpdater.Type() != fastjson.TypeObject { - return nil, client.ErrInvalidUpdater - } - - doc, err := c.Get(ctx, docID, false) - if err != nil { - return nil, err - } - - if isPatch { - // todo - } else { - err = doc.SetWithJSON([]byte(updater)) - } + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return nil, err } + defer txn.Discard(ctx) - _, err = c.save(ctx, txn, doc, false) + res, err := c.updateWithFilter(ctx, filter, updater) if err != nil { return nil, err } - - results := &client.UpdateResult{ - Count: 1, - DocIDs: []string{docID.String()}, - } - return results, nil -} - -func (c *collection) updateWithIDs( - ctx context.Context, - txn datastore.Txn, - docIDs []client.DocID, - updater string, -) (*client.UpdateResult, error) { - parsedUpdater, err := fastjson.Parse(updater) - if err != nil { - return nil, err - } - - isPatch := false - if parsedUpdater.Type() == fastjson.TypeArray { - isPatch = true - } else if parsedUpdater.Type() != fastjson.TypeObject { - return nil, client.ErrInvalidUpdater - } - - results := &client.UpdateResult{ - DocIDs: make([]string, len(docIDs)), - } - for i, docIDs := range docIDs { - doc, err := c.Get(ctx, docIDs, false) - if err != nil { - return nil, err - } - - if isPatch { - // todo - } else { - err = doc.SetWithJSON([]byte(updater)) - } - if err != nil { - return nil, err - } - - _, err = c.save(ctx, txn, doc, false) - if err != nil { - return nil, err - } - - results.DocIDs[i] = docIDs.String() - results.Count++ - } - return results, nil + return res, txn.Commit(ctx) } func (c *collection) updateWithFilter( ctx context.Context, - txn datastore.Txn, filter any, updater string, ) (*client.UpdateResult, error) { @@ -223,7 +67,7 @@ func (c *collection) updateWithFilter( } // Make a selection plan that will scan through only the documents with matching filter. - selectionPlan, err := c.makeSelectionPlan(ctx, txn, filter) + selectionPlan, err := c.makeSelectionPlan(ctx, filter) if err != nil { return nil, err } @@ -240,7 +84,7 @@ func (c *collection) updateWithFilter( // If the plan isn't properly closed at any exit point log the error. 
defer func() { if err := selectionPlan.Close(); err != nil { - log.ErrorE(ctx, "Failed to close the selection plan, after filter update", err) + log.ErrorContextE(ctx, "Failed to close the selection plan, after filter update", err) } }() @@ -263,7 +107,7 @@ func (c *collection) updateWithFilter( // Get the document, and apply the patch docAsMap := docMap.ToMap(selectionPlan.Value()) - doc, err := client.NewDocFromMap(docAsMap, c.Schema()) + doc, err := client.NewDocFromMap(docAsMap, c.Definition()) if err != nil { return nil, err } @@ -277,7 +121,7 @@ func (c *collection) updateWithFilter( } } - _, err = c.save(ctx, txn, doc, false) + err = c.update(ctx, doc) if err != nil { return nil, err } @@ -310,7 +154,6 @@ func (c *collection) isSecondaryIDField(fieldDesc client.FieldDefinition) (clien // patched. func (c *collection) patchPrimaryDoc( ctx context.Context, - txn datastore.Txn, secondaryCollectionName string, relationFieldDescription client.FieldDefinition, docID string, @@ -321,18 +164,15 @@ func (c *collection) patchPrimaryDoc( return err } - primaryCol, err := c.db.getCollectionByName(ctx, txn, relationFieldDescription.Schema) + primaryCol, err := c.db.getCollectionByName(ctx, relationFieldDescription.Kind.Underlying()) if err != nil { return err } - primaryCol = primaryCol.WithTxn(txn) - primarySchema := primaryCol.Schema() primaryField, ok := primaryCol.Description().GetFieldByRelation( relationFieldDescription.RelationName, secondaryCollectionName, relationFieldDescription.Name, - &primarySchema, ) if !ok { return client.NewErrFieldNotExist(relationFieldDescription.RelationName) @@ -348,6 +188,7 @@ func (c *collection) patchPrimaryDoc( primaryDocID, false, ) + if err != nil && !errors.Is(err, ds.ErrNotFound) { return err } @@ -357,8 +198,13 @@ func (c *collection) patchPrimaryDoc( return nil } - pc := c.db.newCollection(primaryCol.Description(), primarySchema) - err = pc.validateOneToOneLinkDoesntAlreadyExist(ctx, txn, primaryDocID.String(), primaryIDField, docID) + pc := c.db.newCollection(primaryCol.Description(), primaryCol.Schema()) + err = pc.validateOneToOneLinkDoesntAlreadyExist( + ctx, + primaryDocID.String(), + primaryIDField, + docID, + ) if err != nil { return err } @@ -391,7 +237,6 @@ func (c *collection) patchPrimaryDoc( // Additionally it only requests for the root scalar fields of the object func (c *collection) makeSelectionPlan( ctx context.Context, - txn datastore.Txn, filter any, ) (planner.RequestPlan, error) { var f immutable.Option[request.Filter] @@ -417,7 +262,16 @@ func (c *collection) makeSelectionPlan( return nil, err } - planner := planner.New(ctx, c.db.WithTxn(txn), txn) + txn := mustGetContextTxn(ctx) + identity := GetContextIdentity(ctx) + planner := planner.New( + ctx, + identity, + c.db.acp, + c.db, + txn, + ) + return planner.MakePlan(&request.Request{ Queries: []*request.OperationDefinition{ { @@ -434,8 +288,12 @@ func (c *collection) makeSelectLocal(filter immutable.Option[request.Filter]) (* Field: request.Field{ Name: c.Name().Value(), }, - Filter: filter, - Fields: make([]request.Selection, 0), + Filterable: request.Filterable{ + Filter: filter, + }, + ChildSelect: request.ChildSelect{ + Fields: make([]request.Selection, 0), + }, } for _, fd := range c.Schema().Fields { diff --git a/db/config.go b/db/config.go new file mode 100644 index 0000000000..397956ed8b --- /dev/null +++ b/db/config.go @@ -0,0 +1,73 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in 
the file licenses/BSL.txt. + +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package db + +import ( + "context" + + "github.com/lens-vm/lens/host-go/engine/module" + "github.com/sourcenetwork/immutable" + + "github.com/sourcenetwork/defradb/acp" + "github.com/sourcenetwork/defradb/events" +) + +const ( + defaultMaxTxnRetries = 5 + updateEventBufferSize = 100 +) + +// Option is a function that sets a config value on the db. +type Option func(*db) + +// WithACP enables access control. If path is empty then acp runs in-memory. +func WithACP(path string) Option { + return func(db *db) { + var acpLocal acp.ACPLocal + acpLocal.Init(context.Background(), path) + db.acp = immutable.Some[acp.ACP](&acpLocal) + } +} + +// WithACPInMemory enables access control in-memory. +func WithACPInMemory() Option { return WithACP("") } + +// WithUpdateEvents enables the update events channel. +func WithUpdateEvents() Option { + return func(db *db) { + db.events = events.Events{ + Updates: immutable.Some(events.New[events.Update](0, updateEventBufferSize)), + } + } +} + +// WithMaxRetries sets the maximum number of retries per transaction. +func WithMaxRetries(num int) Option { + return func(db *db) { + db.maxTxnRetries = immutable.Some(num) + } +} + +// WithLensPoolSize sets the maximum number of cached migration instances to preserve per schema version. +// +// Will default to `5` if not set. +func WithLensPoolSize(size int) Option { + return func(db *db) { + db.lensPoolSize = immutable.Some(size) + } +} + +// WithLensRuntime returns an option that sets the lens registry runtime. +func WithLensRuntime(runtime module.Runtime) Option { + return func(db *db) { + db.lensRuntime = immutable.Some(runtime) + } +} diff --git a/db/config_test.go b/db/config_test.go new file mode 100644 index 0000000000..02bd81a910 --- /dev/null +++ b/db/config_test.go @@ -0,0 +1,55 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt.
+ +package db + +import ( + "testing" + + "github.com/lens-vm/lens/host-go/runtimes/wasmtime" + "github.com/stretchr/testify/assert" +) + +func TestWithACP(t *testing.T) { + d := &db{} + WithACP("test")(d) + assert.True(t, d.acp.HasValue()) +} + +func TestWithACPInMemory(t *testing.T) { + d := &db{} + WithACPInMemory()(d) + assert.True(t, d.acp.HasValue()) +} + +func TestWithUpdateEvents(t *testing.T) { + d := &db{} + WithUpdateEvents()(d) + assert.NotNil(t, d.events) +} + +func TestWithMaxRetries(t *testing.T) { + d := &db{} + WithMaxRetries(10)(d) + assert.True(t, d.maxTxnRetries.HasValue()) + assert.Equal(t, 10, d.maxTxnRetries.Value()) +} + +func TestWithLensPoolSize(t *testing.T) { + d := &db{} + WithLensPoolSize(10)(d) + assert.Equal(t, 10, d.lensPoolSize.Value()) +} + +func TestWithLensRuntime(t *testing.T) { + d := &db{} + WithLensRuntime(wasmtime.New())(d) + assert.NotNil(t, d.lensRuntime.Value()) +} diff --git a/db/context.go b/db/context.go new file mode 100644 index 0000000000..88019af323 --- /dev/null +++ b/db/context.go @@ -0,0 +1,112 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package db + +import ( + "context" + + "github.com/sourcenetwork/immutable" + + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" + "github.com/sourcenetwork/defradb/datastore" +) + +// txnContextKey is the key type for transaction context values. +type txnContextKey struct{} + +// identityContextKey is the key type for ACP identity context values. +type identityContextKey struct{} + +// explicitTxn is a transaction that is managed outside of a db operation. +type explicitTxn struct { + datastore.Txn +} + +func (t *explicitTxn) Commit(ctx context.Context) error { + return nil // do nothing +} + +func (t *explicitTxn) Discard(ctx context.Context) { + // do nothing +} + +// transactionDB is a db that can create transactions. +type transactionDB interface { + NewTxn(context.Context, bool) (datastore.Txn, error) +} + +// ensureContextTxn ensures that the returned context has a transaction. +// +// If a transaction exists on the context it will be made explicit, +// otherwise a new implicit transaction will be created. +// +// The returned context will contain the transaction along with any +// other values, such as the identity, copied from the input context. +func ensureContextTxn(ctx context.Context, db transactionDB, readOnly bool) (context.Context, datastore.Txn, error) { + // explicit transaction + txn, ok := TryGetContextTxn(ctx) + if ok { + return SetContextTxn(ctx, &explicitTxn{txn}), &explicitTxn{txn}, nil + } + // implicit transaction + txn, err := db.NewTxn(ctx, readOnly) + if err != nil { + return nil, txn, err + } + return SetContextTxn(ctx, txn), txn, nil +} + +// mustGetContextTxn returns the transaction from the context or panics. +// +// This should only be called from private functions within the db package +// where we ensure an implicit or explicit transaction always exists. +func mustGetContextTxn(ctx context.Context) datastore.Txn { + return ctx.Value(txnContextKey{}).(datastore.Txn) +} + +// TryGetContextTxn returns a transaction and a bool indicating if the +// txn was retrieved from the given context. 
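+// +// A hypothetical usage sketch of these context helpers; `identity` is an assumed +// immutable.Option[acpIdentity.Identity] already in scope, and error handling is elided: +// +//	txn, _ := db.NewTxn(ctx, false) +//	ctx = SetContextTxn(ctx, txn) +//	ctx = SetContextIdentity(ctx, identity) +//	// ...db operations on ctx now reuse txn instead of auto-committing... +//	_ = txn.Commit(ctx)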
+func TryGetContextTxn(ctx context.Context) (datastore.Txn, bool) { + txn, ok := ctx.Value(txnContextKey{}).(datastore.Txn) + return txn, ok +} + +// SetContextTxn returns a new context with the txn value set. +// +// This will overwrite any previously set transaction value. +func SetContextTxn(ctx context.Context, txn datastore.Txn) context.Context { + return context.WithValue(ctx, txnContextKey{}, txn) +} + +// GetContextIdentity returns the identity from the given context. +// +// If an identity does not exist `acpIdentity.None` is returned. +func GetContextIdentity(ctx context.Context) immutable.Option[acpIdentity.Identity] { + identity, ok := ctx.Value(identityContextKey{}).(acpIdentity.Identity) + if ok { + return immutable.Some(identity) + } + return acpIdentity.None +} + +// SetContextIdentity returns a new context with the identity value set. +// +// This will overwrite any previously set identity value. +func SetContextIdentity(ctx context.Context, identity immutable.Option[acpIdentity.Identity]) context.Context { + if identity.HasValue() { + return context.WithValue(ctx, identityContextKey{}, identity.Value()) + } + return context.WithValue(ctx, identityContextKey{}, nil) +} diff --git a/db/context_test.go b/db/context_test.go new file mode 100644 index 0000000000..c8b1a322e5 --- /dev/null +++ b/db/context_test.go @@ -0,0 +1,57 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package db + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestEnsureContextTxnExplicit(t *testing.T) { + ctx := context.Background() + + db, err := newMemoryDB(ctx) + require.NoError(t, err) + + txn, err := db.NewTxn(ctx, true) + require.NoError(t, err) + + // set an explicit transaction + ctx = SetContextTxn(ctx, txn) + + ctx, txn, err = ensureContextTxn(ctx, db, true) + require.NoError(t, err) + + _, ok := txn.(*explicitTxn) + assert.True(t, ok) + + _, ok = ctx.Value(txnContextKey{}).(*explicitTxn) + assert.True(t, ok) +} + +func TestEnsureContextTxnImplicit(t *testing.T) { + ctx := context.Background() + + db, err := newMemoryDB(ctx) + require.NoError(t, err) + + ctx, txn, err := ensureContextTxn(ctx, db, true) + require.NoError(t, err) + + _, ok := txn.(*explicitTxn) + assert.False(t, ok) + + _, ok = ctx.Value(txnContextKey{}).(*explicitTxn) + assert.False(t, ok) +} diff --git a/db/db.go b/db/db.go index 7b3ff7bcb8..613eea0b23 100644 --- a/db/db.go +++ b/db/db.go @@ -22,20 +22,22 @@ import ( blockstore "github.com/ipfs/boxo/blockstore" ds "github.com/ipfs/go-datastore" dsq "github.com/ipfs/go-datastore/query" + "github.com/lens-vm/lens/host-go/engine/module" + "github.com/sourcenetwork/corelog" "github.com/sourcenetwork/immutable" + "github.com/sourcenetwork/defradb/acp" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/events" "github.com/sourcenetwork/defradb/lens" - "github.com/sourcenetwork/defradb/logging" "github.com/sourcenetwork/defradb/request/graphql" ) var ( - log = logging.MustNewLogger("db") + log = corelog.NewLogger("db") ) // make sure we match our client interface @@ -43,10 +45,6 @@ var ( _ client.Collection = (*collection)(nil) ) -const ( - defaultMaxTxnRetries = 5 -) - // DB is the main interface for interacting with the // DefraDB storage system. type db struct { @@ -57,59 +55,41 @@ type db struct { events events.Events - parser core.Parser + parser core.Parser + + // The maximum number of cached migrations instances to preserve per schema version. + lensPoolSize immutable.Option[int] + lensRuntime immutable.Option[module.Runtime] + lensRegistry client.LensRegistry // The maximum number of retries per transaction. maxTxnRetries immutable.Option[int] - // The maximum number of cached migrations instances to preserve per schema version. - lensPoolSize immutable.Option[int] - // The options used to init the database - options any + options []Option // The ID of the last transaction created. previousTxnID atomic.Uint64 -} - -// Functional option type. -type Option func(*db) -const updateEventBufferSize = 100 - -// WithUpdateEvents enables the update events channel. -func WithUpdateEvents() Option { - return func(db *db) { - db.events = events.Events{ - Updates: immutable.Some(events.New[events.Update](0, updateEventBufferSize)), - } - } -} - -// WithMaxRetries sets the maximum number of retries per transaction. -func WithMaxRetries(num int) Option { - return func(db *db) { - db.maxTxnRetries = immutable.Some(num) - } -} - -// WithLensPoolSize sets the maximum number of cached migrations instances to preserve per schema version. -// -// Will default to `5` if not set. 
-func WithLensPoolSize(num int) Option { - return func(db *db) { - db.lensPoolSize = immutable.Some(num) - } + // Contains ACP if it exists + acp immutable.Option[acp.ACP] } // NewDB creates a new instance of the DB using the given options. -func NewDB(ctx context.Context, rootstore datastore.RootStore, options ...Option) (client.DB, error) { +func NewDB( + ctx context.Context, + rootstore datastore.RootStore, + options ...Option, +) (client.DB, error) { return newDB(ctx, rootstore, options...) } -func newDB(ctx context.Context, rootstore datastore.RootStore, options ...Option) (*implicitTxnDB, error) { - log.Debug(ctx, "Loading: internal datastores") +func newDB( + ctx context.Context, + rootstore datastore.RootStore, + options ...Option, +) (*db, error) { multistore := datastore.MultiStoreFrom(rootstore) parser, err := graphql.NewParser() @@ -120,29 +100,26 @@ func newDB(ctx context.Context, rootstore datastore.RootStore, options ...Option db := &db{ rootstore: rootstore, multistore: multistore, - - parser: parser, - options: options, + acp: acp.NoACP, + parser: parser, + options: options, } // apply options for _, opt := range options { - if opt == nil { - continue - } opt(db) } - // lensPoolSize may be set by `options`, and because they are funcs on db + // lens options may be set by `WithLens` funcs, and because they are funcs on db // we have to mutate `db` here to set the registry. - db.lensRegistry = lens.NewRegistry(db.lensPoolSize, db) + db.lensRegistry = lens.NewRegistry(db, db.lensPoolSize, db.lensRuntime) err = db.initialize(ctx) if err != nil { return nil, err } - return &implicitTxnDB{db}, nil + return db, nil } // NewTxn creates a new transaction. @@ -157,15 +134,6 @@ func (db *db) NewConcurrentTxn(ctx context.Context, readonly bool) (datastore.Tx return datastore.NewConcurrentTxnFrom(ctx, db.rootstore, txnId, readonly) } -// WithTxn returns a new [client.Store] that respects the given transaction. -func (db *db) WithTxn(txn datastore.Txn) client.Store { - return &explicitTxnDB{ - db: db, - txn: txn, - lensRegistry: db.lensRegistry.WithTxn(txn), - } -} - // Root returns the root datastore. func (db *db) Root() datastore.RootStore { return db.rootstore @@ -185,19 +153,47 @@ func (db *db) LensRegistry() client.LensRegistry { return db.lensRegistry } +func (db *db) AddPolicy( + ctx context.Context, + policy string, +) (client.AddPolicyResult, error) { + if !db.acp.HasValue() { + return client.AddPolicyResult{}, client.ErrPolicyAddFailureNoACP + } + identity := GetContextIdentity(ctx) + policyID, err := db.acp.Value().AddPolicy( + ctx, + identity.Value().String(), + policy, + ) + + if err != nil { + return client.AddPolicyResult{}, err + } + + return client.AddPolicyResult{PolicyID: policyID}, nil +} + // Initialize is called when a database is first run and creates all the db global meta data // like Collection ID counters. func (db *db) initialize(ctx context.Context) error { db.glock.Lock() defer db.glock.Unlock() - txn, err := db.NewTxn(ctx, false) + ctx, txn, err := ensureContextTxn(ctx, db, false) if err != nil { return err } defer txn.Discard(ctx) - log.Debug(ctx, "Checking if DB has already been initialized...") + // Start acp if enabled; this will recover previous state if there is any. + if db.acp.HasValue() { + // The db is responsible for calling db.acp.Close() to free acp resources on close. 
+ if err = db.acp.Value().Start(ctx); err != nil { + return err + } + } + exists, err := txn.Systemstore().Has(ctx, ds.NewKey("init")) if err != nil && !errors.Is(err, ds.ErrNotFound) { return err @@ -205,8 +201,7 @@ func (db *db) initialize(ctx context.Context) error { // if we're loading an existing database, just load the schema // and migrations and finish initialization if exists { - log.Debug(ctx, "DB has already been initialized, continuing") - err = db.loadSchema(ctx, txn) + err = db.loadSchema(ctx) if err != nil { return err } @@ -222,11 +217,9 @@ func (db *db) initialize(ctx context.Context) error { return txn.Commit(ctx) } - log.Debug(ctx, "Opened a new DB, needs full initialization") - // init meta data // collection sequence - _, err = db.getSequence(ctx, txn, core.CollectionIDSequenceKey{}) + _, err = db.getSequence(ctx, core.CollectionIDSequenceKey{}) if err != nil { return err } @@ -261,16 +254,23 @@ func (db *db) PrintDump(ctx context.Context) error { // Close is called when we are shutting down the database. // This is the place for any last minute cleanup or releasing of resources (i.e.: Badger instance). func (db *db) Close() { - log.Info(context.Background(), "Closing DefraDB process...") + log.Info("Closing DefraDB process...") if db.events.Updates.HasValue() { db.events.Updates.Value().Close() } err := db.rootstore.Close() if err != nil { - log.ErrorE(context.Background(), "Failure closing running process", err) + log.ErrorE("Failure closing running process", err) } - log.Info(context.Background(), "Successfully closed running process") + + if db.acp.HasValue() { + if err := db.acp.Value().Close(); err != nil { + log.ErrorE("Failure closing acp", err) + } + } + + log.Info("Successfully closed running process") } func printStore(ctx context.Context, store datastore.DSReaderWriter) error { @@ -286,7 +286,7 @@ func printStore(ctx context.Context, store datastore.DSReaderWriter) error { } for r := range results.Next() { - log.Info(ctx, "", logging.NewKV(r.Key, r.Value)) + log.InfoContext(ctx, "", corelog.Any(r.Key, r.Value)) } return results.Close() diff --git a/db/db_test.go b/db/db_test.go index 237a1f21ed..118adb285b 100644 --- a/db/db_test.go +++ b/db/db_test.go @@ -19,7 +19,7 @@ import ( badgerds "github.com/sourcenetwork/defradb/datastore/badger/v4" ) -func newMemoryDB(ctx context.Context) (*implicitTxnDB, error) { +func newMemoryDB(ctx context.Context) (*db, error) { opts := badgerds.Options{Options: badger.DefaultOptions("").WithInMemory(true)} rootstore, err := badgerds.NewDatastore("", &opts) if err != nil { diff --git a/db/description/collection.go b/db/description/collection.go index 8ffd473053..3658d3d318 100644 --- a/db/description/collection.go +++ b/db/description/collection.go @@ -13,8 +13,10 @@ package description import ( "context" "encoding/json" + "errors" "sort" + ds "github.com/ipfs/go-datastore" "github.com/ipfs/go-datastore/query" "github.com/sourcenetwork/defradb/client" @@ -29,6 +31,11 @@ func SaveCollection( txn datastore.Txn, desc client.CollectionDescription, ) (client.CollectionDescription, error) { + existing, err := GetCollectionByID(ctx, txn, desc.ID) + if err != nil && !errors.Is(err, ds.ErrNotFound) { + return client.CollectionDescription{}, err + } + buf, err := json.Marshal(desc) if err != nil { return client.CollectionDescription{}, err @@ -40,6 +47,35 @@ func SaveCollection( return client.CollectionDescription{}, err } + if existing.Name.HasValue() && existing.Name != desc.Name { + nameKey := 
core.NewCollectionNameKey(existing.Name.Value()) + idBuf, err := txn.Systemstore().Get(ctx, nameKey.ToDS()) + nameIndexExists := true + if err != nil { + if errors.Is(err, ds.ErrNotFound) { + nameIndexExists = false + } else { + return client.CollectionDescription{}, err + } + } + if nameIndexExists { + var keyID uint32 + err = json.Unmarshal(idBuf, &keyID) + if err != nil { + return client.CollectionDescription{}, err + } + + if keyID == desc.ID { + // The name index may have already been overwritten, pointing at another collection; + // we should only remove the existing index if it still points at this collection. + err := txn.Systemstore().Delete(ctx, nameKey.ToDS()) + if err != nil { + return client.CollectionDescription{}, err + } + } + } + } + if desc.Name.HasValue() { + idBuf, err := json.Marshal(desc.ID) + if err != nil { @@ -201,7 +237,7 @@ func GetCollections( txn datastore.Txn, ) ([]client.CollectionDescription, error) { q, err := txn.Systemstore().Query(ctx, query.Query{ - Prefix: core.COLLECTION, + Prefix: core.COLLECTION_ID, }) if err != nil { return nil, NewErrFailedToCreateCollectionQuery(err) diff --git a/db/errors.go b/db/errors.go index 34dd0d53b5..f917ee9724 100644 --- a/db/errors.go +++ b/db/errors.go @@ -16,95 +16,117 @@ import ( ) const ( - errFailedToGetHeads string = "failed to get document heads" - errFailedToCreateCollectionQuery string = "failed to create collection prefix query" - errFailedToGetCollection string = "failed to get collection" - errFailedToGetAllCollections string = "failed to get all collections" - errDocVerification string = "the document verification failed" - errAddingP2PCollection string = "cannot add collection ID" - errRemovingP2PCollection string = "cannot remove collection ID" - errAddCollectionWithPatch string = "unknown collection, adding collections via patch is not supported" - errCollectionIDDoesntMatch string = "CollectionID does not match existing" - errSchemaRootDoesntMatch string = "SchemaRoot does not match existing" - errCannotModifySchemaName string = "modifying the schema name is not supported" - errCannotSetVersionID string = "setting the VersionID is not supported. 
It is updated automatically" - errRelationalFieldMissingSchema string = "a `Schema` [name] must be provided when adding a new relation field" - errRelationalFieldInvalidRelationType string = "invalid RelationType" - errRelationalFieldMissingIDField string = "missing id field for relation object field" - errRelationalFieldMissingRelationName string = "missing relation name" - errPrimarySideNotDefined string = "primary side of relation not defined" - errPrimarySideOnMany string = "cannot set the many side of a relation as primary" - errBothSidesPrimary string = "both sides of a relation cannot be primary" - errRelatedFieldKindMismatch string = "invalid Kind of the related field" - errRelatedFieldRelationTypeMismatch string = "invalid RelationType of the related field" - errRelationalFieldIDInvalidType string = "relational id field of invalid kind" - errDuplicateField string = "duplicate field" - errCannotMutateField string = "mutating an existing field is not supported" - errCannotMoveField string = "moving fields is not currently supported" - errCannotDeleteField string = "deleting an existing field is not supported" - errFieldKindNotFound string = "no type found for given name" - errFieldKindDoesNotMatchFieldSchema string = "field Kind does not match field Schema" - errSchemaNotFound string = "no schema found for given name" - errDocumentAlreadyExists string = "a document with the given ID already exists" - errDocumentDeleted string = "a document with the given ID has been deleted" - errIndexMissingFields string = "index missing fields" - errNonZeroIndexIDProvided string = "non-zero index ID provided" - errIndexFieldMissingName string = "index field missing name" - errIndexFieldMissingDirection string = "index field missing direction" - errIndexWithNameAlreadyExists string = "index with name already exists" - errInvalidStoredIndex string = "invalid stored index" - errInvalidStoredIndexKey string = "invalid stored index key" - errNonExistingFieldForIndex string = "creating an index on a non-existing property" - errCollectionDoesntExisting string = "collection with given name doesn't exist" - errFailedToStoreIndexedField string = "failed to store indexed field" - errFailedToReadStoredIndexDesc string = "failed to read stored index description" - errCanNotDeleteIndexedField string = "can not delete indexed field" - errCanNotAddIndexWithPatch string = "adding indexes via patch is not supported" - errCanNotDropIndexWithPatch string = "dropping indexes via patch is not supported" - errCanNotChangeIndexWithPatch string = "changing indexes via patch is not supported" - errIndexWithNameDoesNotExists string = "index with name doesn't exists" - errCorruptedIndex string = "corrupted index. 
Please delete and recreate the index" - errInvalidFieldValue string = "invalid field value" - errUnsupportedIndexFieldType string = "unsupported index field type" - errIndexDescriptionHasNoFields string = "index description has no fields" - errFieldOrAliasToFieldNotExist string = "The given field or alias to field does not exist" - errCreateFile string = "failed to create file" - errRemoveFile string = "failed to remove file" - errOpenFile string = "failed to open file" - errCloseFile string = "failed to close file" - errFailedtoCloseQueryReqAllIDs string = "failed to close query requesting all docIDs" - errFailedToReadByte string = "failed to read byte" - errFailedToWriteString string = "failed to write string" - errJSONDecode string = "failed to decode JSON" - errDocFromMap string = "failed to create a new doc from map" - errDocCreate string = "failed to save a new doc to collection" - errDocUpdate string = "failed to update doc to collection" - errExpectedJSONObject string = "expected JSON object" - errExpectedJSONArray string = "expected JSON array" - errOneOneAlreadyLinked string = "target document is already linked to another document" - errIndexDoesNotMatchName string = "the index used does not match the given name" - errCanNotIndexNonUniqueFields string = "can not index a doc's field(s) that violates unique index" - errInvalidViewQuery string = "the query provided is not valid as a View" + errFailedToGetHeads string = "failed to get document heads" + errFailedToCreateCollectionQuery string = "failed to create collection prefix query" + errFailedToGetCollection string = "failed to get collection" + errFailedToGetAllCollections string = "failed to get all collections" + errDocVerification string = "the document verification failed" + errAddingP2PCollection string = "cannot add collection ID" + errRemovingP2PCollection string = "cannot remove collection ID" + errAddCollectionWithPatch string = "adding collections via patch is not supported" + errCollectionIDDoesntMatch string = "CollectionID does not match existing" + errSchemaRootDoesntMatch string = "SchemaRoot does not match existing" + errCannotModifySchemaName string = "modifying the schema name is not supported" + errCannotSetVersionID string = "setting the VersionID is not supported" + errRelationalFieldInvalidRelationType string = "invalid RelationType" + errRelationalFieldMissingIDField string = "missing id field for relation object field" + errRelatedFieldKindMismatch string = "invalid Kind of the related field" + errRelatedFieldRelationTypeMismatch string = "invalid RelationType of the related field" + errRelationalFieldIDInvalidType string = "relational id field of invalid kind" + errDuplicateField string = "duplicate field" + errCannotMutateField string = "mutating an existing field is not supported" + errCannotMoveField string = "moving fields is not currently supported" + errCannotDeleteField string = "deleting an existing field is not supported" + errFieldKindNotFound string = "no type found for given name" + errFieldKindDoesNotMatchFieldSchema string = "field Kind does not match field Schema" + errDocumentAlreadyExists string = "a document with the given ID already exists" + errDocumentDeleted string = "a document with the given ID has been deleted" + errIndexMissingFields string = "index missing fields" + errNonZeroIndexIDProvided string = "non-zero index ID provided" + errIndexFieldMissingName string = "index field missing name" + errIndexFieldMissingDirection string = "index field missing direction" + 
errIndexWithNameAlreadyExists string = "index with name already exists" + errInvalidStoredIndex string = "invalid stored index" + errInvalidStoredIndexKey string = "invalid stored index key" + errNonExistingFieldForIndex string = "creating an index on a non-existing property" + errCollectionDoesntExisting string = "collection with given name doesn't exist" + errFailedToStoreIndexedField string = "failed to store indexed field" + errFailedToReadStoredIndexDesc string = "failed to read stored index description" + errCanNotDeleteIndexedField string = "can not delete indexed field" + errCanNotAddIndexWithPatch string = "adding indexes via patch is not supported" + errCanNotDropIndexWithPatch string = "dropping indexes via patch is not supported" + errCanNotChangeIndexWithPatch string = "changing indexes via patch is not supported" + errIndexWithNameDoesNotExists string = "index with name doesn't exists" + errCorruptedIndex string = "corrupted index. Please delete and recreate the index" + errInvalidFieldValue string = "invalid field value" + errUnsupportedIndexFieldType string = "unsupported index field type" + errIndexDescriptionHasNoFields string = "index description has no fields" + errFieldOrAliasToFieldNotExist string = "The given field or alias to field does not exist" + errCreateFile string = "failed to create file" + errRemoveFile string = "failed to remove file" + errOpenFile string = "failed to open file" + errCloseFile string = "failed to close file" + errFailedtoCloseQueryReqAllIDs string = "failed to close query requesting all docIDs" + errFailedToReadByte string = "failed to read byte" + errFailedToWriteString string = "failed to write string" + errJSONDecode string = "failed to decode JSON" + errDocFromMap string = "failed to create a new doc from map" + errDocCreate string = "failed to save a new doc to collection" + errDocUpdate string = "failed to update doc to collection" + errExpectedJSONObject string = "expected JSON object" + errExpectedJSONArray string = "expected JSON array" + errOneOneAlreadyLinked string = "target document is already linked to another document" + errIndexDoesNotMatchName string = "the index used does not match the given name" + errCanNotIndexNonUniqueFields string = "can not index a doc's field(s) that violates unique index" + errInvalidViewQuery string = "the query provided is not valid as a View" + errCollectionAlreadyExists string = "collection already exists" + errMultipleActiveCollectionVersions string = "multiple versions of same collection cannot be active" + errCollectionSourcesCannotBeAddedRemoved string = "collection sources cannot be added or removed" + errCollectionSourceIDMutated string = "collection source ID cannot be mutated" + errCollectionIndexesCannotBeMutated string = "collection indexes cannot be mutated" + errCollectionFieldsCannotBeMutated string = "collection fields cannot be mutated" + errCollectionPolicyCannotBeMutated string = "collection policy cannot be mutated" + errCollectionRootIDCannotBeMutated string = "collection root ID cannot be mutated" + errCollectionSchemaVersionIDCannotBeMutated string = "collection schema version ID cannot be mutated" + errCollectionIDCannotBeZero string = "collection ID cannot be zero" + errCollectionsCannotBeDeleted string = "collections cannot be deleted" + errCanNotHavePolicyWithoutACP string = "can not specify policy on collection, without acp" + errSecondaryFieldOnSchema string = "secondary relation fields cannot be defined on the schema" + errRelationMissingField string = "relation 
missing field" ) var ( - ErrFailedToGetCollection = errors.New(errFailedToGetCollection) - ErrSubscriptionsNotAllowed = errors.New("server does not accept subscriptions") - ErrInvalidFilter = errors.New("invalid filter") - ErrCollectionAlreadyExists = errors.New("collection already exists") - ErrCollectionNameEmpty = errors.New("collection name can't be empty") - ErrSchemaNameEmpty = errors.New("schema name can't be empty") - ErrSchemaRootEmpty = errors.New("schema root can't be empty") - ErrSchemaVersionIDEmpty = errors.New("schema version ID can't be empty") - ErrKeyEmpty = errors.New("key cannot be empty") - ErrCannotSetVersionID = errors.New(errCannotSetVersionID) - ErrIndexMissingFields = errors.New(errIndexMissingFields) - ErrIndexFieldMissingName = errors.New(errIndexFieldMissingName) - ErrCorruptedIndex = errors.New(errCorruptedIndex) - ErrExpectedJSONObject = errors.New(errExpectedJSONObject) - ErrExpectedJSONArray = errors.New(errExpectedJSONArray) - ErrInvalidViewQuery = errors.New(errInvalidViewQuery) - ErrCanNotIndexNonUniqueFields = errors.New(errCanNotIndexNonUniqueFields) + ErrFailedToGetCollection = errors.New(errFailedToGetCollection) + ErrCanNotCreateIndexOnCollectionWithPolicy = errors.New("can not create index on a collection with a policy") + ErrSubscriptionsNotAllowed = errors.New("server does not accept subscriptions") + ErrInvalidFilter = errors.New("invalid filter") + ErrCollectionAlreadyExists = errors.New(errCollectionAlreadyExists) + ErrCollectionNameEmpty = errors.New("collection name can't be empty") + ErrSchemaNameEmpty = errors.New("schema name can't be empty") + ErrSchemaRootEmpty = errors.New("schema root can't be empty") + ErrSchemaVersionIDEmpty = errors.New("schema version ID can't be empty") + ErrKeyEmpty = errors.New("key cannot be empty") + ErrCannotSetVersionID = errors.New(errCannotSetVersionID) + ErrIndexMissingFields = errors.New(errIndexMissingFields) + ErrIndexFieldMissingName = errors.New(errIndexFieldMissingName) + ErrCorruptedIndex = errors.New(errCorruptedIndex) + ErrExpectedJSONObject = errors.New(errExpectedJSONObject) + ErrExpectedJSONArray = errors.New(errExpectedJSONArray) + ErrInvalidViewQuery = errors.New(errInvalidViewQuery) + ErrCanNotIndexNonUniqueFields = errors.New(errCanNotIndexNonUniqueFields) + ErrMultipleActiveCollectionVersions = errors.New(errMultipleActiveCollectionVersions) + ErrCollectionSourcesCannotBeAddedRemoved = errors.New(errCollectionSourcesCannotBeAddedRemoved) + ErrCollectionSourceIDMutated = errors.New(errCollectionSourceIDMutated) + ErrCollectionIndexesCannotBeMutated = errors.New(errCollectionIndexesCannotBeMutated) + ErrCollectionFieldsCannotBeMutated = errors.New(errCollectionFieldsCannotBeMutated) + ErrCollectionRootIDCannotBeMutated = errors.New(errCollectionRootIDCannotBeMutated) + ErrCollectionSchemaVersionIDCannotBeMutated = errors.New(errCollectionSchemaVersionIDCannotBeMutated) + ErrCollectionIDCannotBeZero = errors.New(errCollectionIDCannotBeZero) + ErrCollectionsCannotBeDeleted = errors.New(errCollectionsCannotBeDeleted) + ErrCanNotHavePolicyWithoutACP = errors.New(errCanNotHavePolicyWithoutACP) + ErrSecondaryFieldOnSchema = errors.New(errSecondaryFieldOnSchema) + ErrRelationMissingField = errors.New(errRelationMissingField) + ErrMultipleRelationPrimaries = errors.New("relation can only have a single field set as primary") ) // NewErrFailedToGetHeads returns a new error indicating that the heads of a document @@ -208,6 +230,13 @@ func NewErrAddCollectionWithPatch(name string) error { ) } +func 
NewErrAddCollectionIDWithPatch(id uint32) error { + return errors.New( + errAddCollectionWithPatch, + errors.NewKV("ID", id), + ) +} + func NewErrCollectionIDDoesntMatch(name string, existingID, proposedID uint32) error { return errors.New( errCollectionIDDoesntMatch, @@ -234,14 +263,6 @@ func NewErrCannotModifySchemaName(existingName, proposedName string) error { ) } -func NewErrRelationalFieldMissingSchema(name string, kind client.FieldKind) error { - return errors.New( - errRelationalFieldMissingSchema, - errors.NewKV("Field", name), - errors.NewKV("Kind", kind), - ) -} - func NewErrRelationalFieldMissingIDField(name string, expectedName string) error { return errors.New( errRelationalFieldMissingIDField, @@ -250,34 +271,6 @@ func NewErrRelationalFieldMissingIDField(name string, expectedName string) error ) } -func NewErrRelationalFieldMissingRelationName(name string) error { - return errors.New( - errRelationalFieldMissingRelationName, - errors.NewKV("Field", name), - ) -} - -func NewErrPrimarySideNotDefined(relationName string) error { - return errors.New( - errPrimarySideNotDefined, - errors.NewKV("RelationName", relationName), - ) -} - -func NewErrPrimarySideOnMany(name string) error { - return errors.New( - errPrimarySideOnMany, - errors.NewKV("Field", name), - ) -} - -func NewErrBothSidesPrimary(relationName string) error { - return errors.New( - errBothSidesPrimary, - errors.NewKV("RelationName", relationName), - ) -} - func NewErrRelatedFieldKindMismatch(relationName string, expected client.FieldKind, actual client.FieldKind) error { return errors.New( errRelatedFieldKindMismatch, @@ -296,9 +289,10 @@ func NewErrRelationalFieldIDInvalidType(name string, expected, actual client.Fie ) } -func NewErrFieldKindNotFound(kind string) error { +func NewErrFieldKindNotFound(name string, kind string) error { return errors.New( errFieldKindNotFound, + errors.NewKV("Field", name), errors.NewKV("Kind", kind), ) } @@ -311,14 +305,6 @@ func NewErrFieldKindDoesNotMatchFieldSchema(kind string, schema string) error { ) } -func NewErrSchemaNotFound(name string, schema string) error { - return errors.New( - errSchemaNotFound, - errors.NewKV("Field", name), - errors.NewKV("Schema", schema), - ) -} - func NewErrDuplicateField(name string) error { return errors.New(errDuplicateField, errors.NewKV("Name", name)) } @@ -543,3 +529,98 @@ func NewErrInvalidViewQueryMissingQuery() error { errors.NewKV("Reason", "No query provided"), ) } + +func NewErrCollectionAlreadyExists(name string) error { + return errors.New( + errCollectionAlreadyExists, + errors.NewKV("Name", name), + ) +} + +func NewErrCollectionIDAlreadyExists(id uint32) error { + return errors.New( + errCollectionAlreadyExists, + errors.NewKV("ID", id), + ) +} + +func NewErrMultipleActiveCollectionVersions(name string, root uint32) error { + return errors.New( + errMultipleActiveCollectionVersions, + errors.NewKV("Name", name), + errors.NewKV("Root", root), + ) +} + +func NewErrCollectionSourcesCannotBeAddedRemoved(colID uint32) error { + return errors.New( + errCollectionSourcesCannotBeAddedRemoved, + errors.NewKV("CollectionID", colID), + ) +} + +func NewErrCollectionSourceIDMutated(colID uint32, newSrcID uint32, oldSrcID uint32) error { + return errors.New( + errCollectionSourceIDMutated, + errors.NewKV("CollectionID", colID), + errors.NewKV("NewCollectionSourceID", newSrcID), + errors.NewKV("OldCollectionSourceID", oldSrcID), + ) +} + +func NewErrCollectionIndexesCannotBeMutated(colID uint32) error { + return errors.New( + 
errCollectionIndexesCannotBeMutated, + errors.NewKV("CollectionID", colID), + ) +} + +func NewErrCollectionFieldsCannotBeMutated(colID uint32) error { + return errors.New( + errCollectionFieldsCannotBeMutated, + errors.NewKV("CollectionID", colID), + ) +} + +func NewErrCollectionPolicyCannotBeMutated(colID uint32) error { + return errors.New( + errCollectionPolicyCannotBeMutated, + errors.NewKV("CollectionID", colID), + ) +} + +func NewErrCollectionRootIDCannotBeMutated(colID uint32) error { + return errors.New( + errCollectionRootIDCannotBeMutated, + errors.NewKV("CollectionID", colID), + ) +} + +func NewErrCollectionSchemaVersionIDCannotBeMutated(colID uint32) error { + return errors.New( + errCollectionSchemaVersionIDCannotBeMutated, + errors.NewKV("CollectionID", colID), + ) +} + +func NewErrCollectionsCannotBeDeleted(colID uint32) error { + return errors.New( + errCollectionsCannotBeDeleted, + errors.NewKV("CollectionID", colID), + ) +} + +func NewErrSecondaryFieldOnSchema(name string) error { + return errors.New( + errSecondaryFieldOnSchema, + errors.NewKV("Name", name), + ) +} + +func NewErrRelationMissingField(objectName, relationName string) error { + return errors.New( + errRelationMissingField, + errors.NewKV("Object", objectName), + errors.NewKV("RelationName", relationName), + ) +} diff --git a/db/fetcher/encoded_doc.go b/db/fetcher/encoded_doc.go index 889aea848a..cb4345abe1 100644 --- a/db/fetcher/encoded_doc.go +++ b/db/fetcher/encoded_doc.go @@ -106,13 +106,13 @@ func (encdoc *encodedDocument) Reset() { } // Decode returns a properly decoded document object -func Decode(encdoc EncodedDocument, sd client.SchemaDescription) (*client.Document, error) { +func Decode(encdoc EncodedDocument, collectionDefinition client.CollectionDefinition) (*client.Document, error) { docID, err := client.NewDocIDFromString(string(encdoc.ID())) if err != nil { return nil, err } - doc := client.NewDocWithID(docID, sd) + doc := client.NewDocWithID(docID, collectionDefinition) properties, err := encdoc.Properties(false) if err != nil { return nil, err diff --git a/db/fetcher/fetcher.go b/db/fetcher/fetcher.go index e4bb08cee4..894361dea4 100644 --- a/db/fetcher/fetcher.go +++ b/db/fetcher/fetcher.go @@ -18,11 +18,16 @@ import ( "github.com/bits-and-blooms/bitset" dsq "github.com/ipfs/go-datastore/query" + "github.com/sourcenetwork/immutable" + + "github.com/sourcenetwork/defradb/acp" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/datastore/iterable" "github.com/sourcenetwork/defradb/db/base" + "github.com/sourcenetwork/defradb/db/permission" "github.com/sourcenetwork/defradb/planner/mapper" "github.com/sourcenetwork/defradb/request/graphql/parser" ) @@ -56,7 +61,9 @@ func (s *ExecInfo) Reset() { type Fetcher interface { Init( ctx context.Context, + identity immutable.Option[acpIdentity.Identity], txn datastore.Txn, + acp immutable.Option[acp.ACP], col client.Collection, fields []client.FieldDefinition, filter *mapper.Filter, @@ -81,6 +88,10 @@ var ( // DocumentFetcher is a utility to incrementally fetch all the documents. 
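// // A rough lifecycle sketch with the new signature (hypothetical caller code; identity, txn, acp, col, fields, filter, docMapper, and spans are assumed to be in scope, and error handling is elided): // //	df := new(DocumentFetcher) //	_ = df.Init(ctx, identity, txn, acp, col, fields, filter, docMapper, false, false) //	_ = df.Start(ctx, spans) //	for { //		encdoc, _, err := df.FetchNext(ctx) //		if err != nil || encdoc == nil { //			break //		} //	}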
type DocumentFetcher struct { + identity immutable.Option[acpIdentity.Identity] + acp immutable.Option[acp.ACP] + passedPermissionCheck bool // have valid permission to access + col client.Collection reverse bool deletedDocs bool @@ -136,7 +147,9 @@ type DocumentFetcher struct { // Init implements DocumentFetcher. func (df *DocumentFetcher) Init( ctx context.Context, + identity immutable.Option[acpIdentity.Identity], txn datastore.Txn, + acp immutable.Option[acp.ACP], col client.Collection, fields []client.FieldDefinition, filter *mapper.Filter, @@ -146,7 +159,7 @@ func (df *DocumentFetcher) Init( ) error { df.txn = txn - err := df.init(col, fields, filter, docmapper, reverse) + err := df.init(identity, acp, col, fields, filter, docmapper, reverse) if err != nil { return err } @@ -156,19 +169,23 @@ func (df *DocumentFetcher) Init( df.deletedDocFetcher = new(DocumentFetcher) df.deletedDocFetcher.txn = txn } - return df.deletedDocFetcher.init(col, fields, filter, docmapper, reverse) + return df.deletedDocFetcher.init(identity, acp, col, fields, filter, docmapper, reverse) } return nil } func (df *DocumentFetcher) init( + identity immutable.Option[acpIdentity.Identity], + acp immutable.Option[acp.ACP], col client.Collection, fields []client.FieldDefinition, filter *mapper.Filter, docMapper *core.DocumentMapping, reverse bool, ) error { + df.identity = identity + df.acp = acp df.col = col df.reverse = reverse df.initialized = true @@ -476,6 +493,7 @@ func (df *DocumentFetcher) processKV(kv *keyValue) error { } } df.doc.id = []byte(kv.Key.DocID) + df.passedPermissionCheck = false df.passedFilter = false df.ranFilter = false @@ -544,24 +562,26 @@ func (df *DocumentFetcher) FetchNext(ctx context.Context) (EncodedDocument, Exec (df.reverse && ddf.kv.Key.DocID > df.kv.Key.DocID) || (!df.reverse && ddf.kv.Key.DocID < df.kv.Key.DocID) { encdoc, execInfo, err := ddf.FetchNext(ctx) + if err != nil { return nil, ExecInfo{}, err } - if encdoc != nil { - return encdoc, execInfo, err - } resultExecInfo.Add(execInfo) + if encdoc != nil { + return encdoc, resultExecInfo, nil + } } } } encdoc, execInfo, err := df.fetchNext(ctx) + if err != nil { return nil, ExecInfo{}, err } - resultExecInfo.Add(execInfo) + resultExecInfo.Add(execInfo) return encdoc, resultExecInfo, err } @@ -573,9 +593,6 @@ func (df *DocumentFetcher) fetchNext(ctx context.Context) (EncodedDocument, Exec if df.kv == nil { return nil, ExecInfo{}, client.NewErrUninitializeProperty("DocumentFetcher", "kv") } - // save the DocID of the current kv pair so we can track when we cross the doc pair boundries - // keyparts := df.kv.Key.List() - // key := keyparts[len(keyparts)-2] prevExecInfo := df.execInfo defer func() { df.execInfo.Add(prevExecInfo) }() @@ -584,8 +601,7 @@ func (df *DocumentFetcher) fetchNext(ctx context.Context) (EncodedDocument, Exec // we'll know when were done when either // A) Reach the end of the iterator for { - err := df.processKV(df.kv) - if err != nil { + if err := df.processKV(df.kv); err != nil { return nil, ExecInfo{}, err } @@ -606,16 +622,45 @@ func (df *DocumentFetcher) fetchNext(ctx context.Context) (EncodedDocument, Exec } } - // if we don't pass the filter (ran and pass) - // theres no point in collecting other select fields - // so we seek to the next doc - spansDone, docDone, err := df.nextKey(ctx, !df.passedFilter && df.ranFilter) + // Check if we have read access, for document on this collection, with the given identity. 
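+ // A denial is not an error: the seek flag passed to nextKey below includes + // !passedPermissionCheck, so documents this identity cannot read are + // skipped rather than returned.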
+ if !df.passedPermissionCheck { + if !df.acp.HasValue() { + // If no acp is available, then we have unrestricted access. + df.passedPermissionCheck = true + } else { + hasPermission, err := permission.CheckAccessOfDocOnCollectionWithACP( + ctx, + df.identity, + df.acp.Value(), + df.col, + acp.ReadPermission, + df.kv.Key.DocID, + ) + + if err != nil { + df.passedPermissionCheck = false + return nil, ExecInfo{}, err + } + + df.passedPermissionCheck = hasPermission + } + } + + // if we don't pass the filter (ran and pass) or if we don't have access to document then + // there is no point in collecting other select fields, so we seek to the next doc. + spansDone, docDone, err := df.nextKey(ctx, !df.passedPermissionCheck || !df.passedFilter && df.ranFilter) + if err != nil { return nil, ExecInfo{}, err } - if docDone { - df.execInfo.DocsFetched++ + if !docDone { + continue + } + + df.execInfo.DocsFetched++ + + if df.passedPermissionCheck { if df.filter != nil { // if we passed, return if df.passedFilter { @@ -636,21 +681,11 @@ func (df *DocumentFetcher) fetchNext(ctx context.Context) (EncodedDocument, Exec } else { return df.doc, df.execInfo, nil } + } - if !spansDone { - continue - } - + if spansDone { return nil, df.execInfo, nil } - - // // crossed document kv boundary? - // // if so, return document - // newkeyparts := df.kv.Key.List() - // newKey := newkeyparts[len(newkeyparts)-2] - // if newKey != key { - // return df.doc, nil - // } } } diff --git a/db/fetcher/indexer.go b/db/fetcher/indexer.go index 158c7cb88d..2e776fd55b 100644 --- a/db/fetcher/indexer.go +++ b/db/fetcher/indexer.go @@ -13,6 +13,10 @@ package fetcher import ( "context" + "github.com/sourcenetwork/immutable" + + "github.com/sourcenetwork/defradb/acp" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" @@ -54,7 +58,9 @@ func NewIndexFetcher( func (f *IndexFetcher) Init( ctx context.Context, + identity immutable.Option[acpIdentity.Identity], txn datastore.Txn, + acp immutable.Option[acp.ACP], col client.Collection, fields []client.FieldDefinition, filter *mapper.Filter, @@ -93,7 +99,18 @@ outer: f.indexIter = iter if f.docFetcher != nil && len(f.docFields) > 0 { - err = f.docFetcher.Init(ctx, f.txn, f.col, f.docFields, f.docFilter, f.mapping, false, false) + err = f.docFetcher.Init( + ctx, + identity, + f.txn, + acp, + f.col, + f.docFields, + f.docFilter, + f.mapping, + false, + false, + ) } return err @@ -128,7 +145,7 @@ func (f *IndexFetcher) FetchNext(ctx context.Context) (EncodedDocument, ExecInfo property := &encProperty{Desc: indexedField} field := res.key.Fields[i] - if field.Value == nil { + if field.Value.IsNil() { hasNilField = true } @@ -147,11 +164,14 @@ func (f *IndexFetcher) FetchNext(ctx context.Context) (EncodedDocument, ExecInfo if f.indexDesc.Unique && !hasNilField { f.doc.id = res.value } else { - docID, ok := res.key.Fields[len(res.key.Fields)-1].Value.(string) - if !ok { + lastVal := res.key.Fields[len(res.key.Fields)-1].Value + if str, ok := lastVal.String(); ok { + f.doc.id = []byte(str) + } else if bytes, ok := lastVal.Bytes(); ok { + f.doc.id = bytes + } else { return nil, ExecInfo{}, err } - f.doc.id = []byte(docID) } if f.docFetcher != nil && len(f.docFields) > 0 { diff --git a/db/fetcher/indexer_iterators.go b/db/fetcher/indexer_iterators.go index 482c15d31a..d1ca5841c3 100644 --- a/db/fetcher/indexer_iterators.go +++ b/db/fetcher/indexer_iterators.go @@ 
-161,7 +161,7 @@ func (i *eqSingleIndexIterator) Close() error { type inIndexIterator struct { indexIterator - inValues []any + inValues []client.NormalValue nextValIndex int ctx context.Context store datastore.DSReaderWriter @@ -274,7 +274,7 @@ func (iter *scanningIndexIterator) Next() (indexIterResult, error) { // checks if the value satisfies the condition type valueMatcher interface { - Match(any) (bool, error) + Match(client.NormalValue) (bool, error) } type intMatcher struct { @@ -282,12 +282,17 @@ type intMatcher struct { evalFunc func(int64, int64) bool } -func (m *intMatcher) Match(value any) (bool, error) { - intVal, ok := value.(int64) - if !ok { - return false, NewErrUnexpectedTypeValue[int64](value) +func (m *intMatcher) Match(value client.NormalValue) (bool, error) { + if intVal, ok := value.Int(); ok { + return m.evalFunc(intVal, m.value), nil + } + if intOptVal, ok := value.NillableInt(); ok { + if !intOptVal.HasValue() { + return false, nil + } + return m.evalFunc(intOptVal.Value(), m.value), nil } - return m.evalFunc(intVal, m.value), nil + return false, NewErrUnexpectedTypeValue[int64](value) } type floatMatcher struct { @@ -295,12 +300,17 @@ type floatMatcher struct { evalFunc func(float64, float64) bool } -func (m *floatMatcher) Match(value any) (bool, error) { - floatVal, ok := value.(float64) - if !ok { - return false, NewErrUnexpectedTypeValue[float64](value) +func (m *floatMatcher) Match(value client.NormalValue) (bool, error) { + if floatVal, ok := value.Float(); ok { + return m.evalFunc(floatVal, m.value), nil + } + if floatOptVal, ok := value.NillableFloat(); ok { + if !floatOptVal.HasValue() { + return false, nil + } + return m.evalFunc(floatOptVal.Value(), m.value), nil } - return m.evalFunc(m.value, floatVal), nil + return false, NewErrUnexpectedTypeValue[float64](value) } type stringMatcher struct { @@ -308,74 +318,36 @@ type stringMatcher struct { evalFunc func(string, string) bool } -func (m *stringMatcher) Match(value any) (bool, error) { - stringVal, ok := value.(string) - if !ok { - return false, NewErrUnexpectedTypeValue[string](value) +func (m *stringMatcher) Match(value client.NormalValue) (bool, error) { + if strVal, ok := value.String(); ok { + return m.evalFunc(strVal, m.value), nil + } + if strOptVal, ok := value.NillableString(); ok { + if !strOptVal.HasValue() { + return false, nil + } + return m.evalFunc(strOptVal.Value(), m.value), nil } - return m.evalFunc(m.value, stringVal), nil + return false, NewErrUnexpectedTypeValue[string](value) } -type nilMatcher struct{} +type nilMatcher struct { + matchNil bool +} -func (m *nilMatcher) Match(value any) (bool, error) { - return value == nil, nil +func (m *nilMatcher) Match(value client.NormalValue) (bool, error) { + return value.IsNil() == m.matchNil, nil } // checks if the index value is or is not in the given array type indexInArrayMatcher struct { - inValues []any + inValues []client.NormalValue isIn bool } -func newNinIndexCmp(values []any, kind client.FieldKind, isIn bool) (*indexInArrayMatcher, error) { - normalizeValueFunc := getNormalizeValueFunc(kind) - for i := range values { - normalized, err := normalizeValueFunc(values[i]) - if err != nil { - return nil, err - } - values[i] = normalized - } - return &indexInArrayMatcher{inValues: values, isIn: isIn}, nil -} - -func getNormalizeValueFunc(kind client.FieldKind) func(any) (any, error) { - switch kind { - case client.FieldKind_NILLABLE_INT: - return func(value any) (any, error) { - if v, ok := value.(int64); ok { - return v, nil - } - if 
v, ok := value.(int32); ok { - return int64(v), nil - } - return nil, ErrInvalidInOperatorValue - } - case client.FieldKind_NILLABLE_FLOAT: - return func(value any) (any, error) { - if v, ok := value.(float64); ok { - return v, nil - } - if v, ok := value.(float32); ok { - return float64(v), nil - } - return nil, ErrInvalidInOperatorValue - } - case client.FieldKind_NILLABLE_STRING: - return func(value any) (any, error) { - if v, ok := value.(string); ok { - return v, nil - } - return nil, ErrInvalidInOperatorValue - } - } - return nil -} - -func (m *indexInArrayMatcher) Match(value any) (bool, error) { +func (m *indexInArrayMatcher) Match(value client.NormalValue) (bool, error) { for _, inVal := range m.inValues { - if inVal == value { + if inVal.Unwrap() == value.Unwrap() { return m.isIn, nil } } @@ -419,17 +391,23 @@ func newLikeIndexCmp(filterValue string, isLike bool, isCaseInsensitive bool) (* return matcher, nil } -func (m *indexLikeMatcher) Match(value any) (bool, error) { - currentVal, ok := value.(string) +func (m *indexLikeMatcher) Match(value client.NormalValue) (bool, error) { + strVal, ok := value.String() if !ok { - return false, NewErrUnexpectedTypeValue[string](currentVal) + strOptVal, ok := value.NillableString() + if !ok { + return false, NewErrUnexpectedTypeValue[string](value) + } + if !strOptVal.HasValue() { + return false, nil + } + strVal = strOptVal.Value() } - if m.isCaseInsensitive { - currentVal = strings.ToLower(currentVal) + strVal = strings.ToLower(strVal) } - return m.doesMatch(currentVal) == m.isLike, nil + return m.doesMatch(strVal) == m.isLike, nil } func (m *indexLikeMatcher) doesMatch(currentVal string) bool { @@ -451,7 +429,7 @@ func (m *indexLikeMatcher) doesMatch(currentVal string) bool { type anyMatcher struct{} -func (m *anyMatcher) Match(any) (bool, error) { return true, nil } +func (m *anyMatcher) Match(client.NormalValue) (bool, error) { return true, nil } // newPrefixIndexIterator creates a new eqPrefixIndexIterator for fetching indexed data. // It can modify the input matchers slice. 
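// For illustration, a hypothetical boolMatcher following the same NormalValue shape as the matchers above (assuming NormalValue exposes Bool and NillableBool accessors mirroring the Int, Float, and String ones used here): // //	type boolMatcher struct { //		value bool //	} // //	func (m *boolMatcher) Match(value client.NormalValue) (bool, error) { //		if b, ok := value.Bool(); ok { //			return b == m.value, nil //		} //		if optB, ok := value.NillableBool(); ok { //			return optB.HasValue() && optB.Value() == m.value, nil //		} //		return false, NewErrUnexpectedTypeValue[bool](value) //	}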
@@ -459,7 +437,7 @@ func (f *IndexFetcher) newPrefixIndexIterator( fieldConditions []fieldFilterCond, matchers []valueMatcher, ) (*eqPrefixIndexIterator, error) { - keyFieldValues := make([]any, 0, len(fieldConditions)) + keyFieldValues := make([]client.NormalValue, 0, len(fieldConditions)) for i := range fieldConditions { if fieldConditions[i].op != opEq { // prefix can be created only for subsequent _eq conditions @@ -496,14 +474,12 @@ func (f *IndexFetcher) newInIndexIterator( fieldConditions []fieldFilterCond, matchers []valueMatcher, ) (*inIndexIterator, error) { - inArr, ok := fieldConditions[0].val.([]any) - if !ok { + if !fieldConditions[0].val.IsArray() { return nil, ErrInvalidInOperatorValue } - inValues := make([]any, 0, len(inArr)) - for _, v := range inArr { - fieldVal := client.NewFieldValue(client.NONE_CRDT, v) - inValues = append(inValues, fieldVal.Value()) + inValues, err := client.ToArrayOfNormalValues(fieldConditions[0].val) + if err != nil { + return nil, err } // iterators for _in filter already iterate over keys with first field value @@ -514,7 +490,7 @@ func (f *IndexFetcher) newInIndexIterator( var iter indexIterator if isUniqueFetchByFullKey(&f.indexDesc, fieldConditions) { - keyFieldValues := make([]any, len(fieldConditions)) + keyFieldValues := make([]client.NormalValue, len(fieldConditions)) for i := range fieldConditions { keyFieldValues[i] = fieldConditions[i].val } @@ -547,7 +523,7 @@ func (f *IndexFetcher) newIndexDataStoreKey() core.IndexDataStoreKey { return key } -func (f *IndexFetcher) newIndexDataStoreKeyWithValues(values []any) core.IndexDataStoreKey { +func (f *IndexFetcher) newIndexDataStoreKeyWithValues(values []client.NormalValue) core.IndexDataStoreKey { fields := make([]core.IndexedField, len(values)) for i := range values { fields[i].Value = values[i] @@ -557,7 +533,10 @@ func (f *IndexFetcher) newIndexDataStoreKeyWithValues(values []any) core.IndexDa } func (f *IndexFetcher) createIndexIterator() (indexIterator, error) { - fieldConditions := f.determineFieldFilterConditions() + fieldConditions, err := f.determineFieldFilterConditions() + if err != nil { + return nil, err + } matchers, err := createValueMatchers(fieldConditions) if err != nil { @@ -567,7 +546,7 @@ func (f *IndexFetcher) createIndexIterator() (indexIterator, error) { switch fieldConditions[0].op { case opEq: if isUniqueFetchByFullKey(&f.indexDesc, fieldConditions) { - keyFieldValues := make([]any, len(fieldConditions)) + keyFieldValues := make([]client.NormalValue, len(fieldConditions)) for i := range fieldConditions { keyFieldValues[i] = fieldConditions[i].val } @@ -600,49 +579,44 @@ func createValueMatcher(condition *fieldFilterCond) (valueMatcher, error) { return &anyMatcher{}, nil } - if client.IsNillableKind(condition.kind) && condition.val == nil { - return &nilMatcher{}, nil + if condition.val.IsNil() { + return &nilMatcher{matchNil: condition.op == opEq}, nil } switch condition.op { case opEq, opGt, opGe, opLt, opLe, opNe: - switch condition.kind { - case client.FieldKind_NILLABLE_INT: - var intVal int64 - switch v := condition.val.(type) { - case int64: - intVal = v - case int32: - intVal = int64(v) - case int: - intVal = int64(v) - default: - return nil, NewErrUnexpectedTypeValue[int64](condition.val) - } - return &intMatcher{value: intVal, evalFunc: getCompareValsFunc[int64](condition.op)}, nil - case client.FieldKind_NILLABLE_FLOAT: - floatVal, ok := condition.val.(float64) - if !ok { - return nil, NewErrUnexpectedTypeValue[float64](condition.val) - } - return 
&floatMatcher{value: floatVal, evalFunc: getCompareValsFunc[float64](condition.op)}, nil - case client.FieldKind_DocID, client.FieldKind_NILLABLE_STRING: - strVal, ok := condition.val.(string) - if !ok { - return nil, NewErrUnexpectedTypeValue[string](condition.val) - } - return &stringMatcher{value: strVal, evalFunc: getCompareValsFunc[string](condition.op)}, nil + if v, ok := condition.val.Int(); ok { + return &intMatcher{value: v, evalFunc: getCompareValsFunc[int64](condition.op)}, nil + } + if v, ok := condition.val.NillableInt(); ok { + return &intMatcher{value: v.Value(), evalFunc: getCompareValsFunc[int64](condition.op)}, nil + } + if v, ok := condition.val.Float(); ok { + return &floatMatcher{value: v, evalFunc: getCompareValsFunc[float64](condition.op)}, nil + } + if v, ok := condition.val.NillableFloat(); ok { + return &floatMatcher{value: v.Value(), evalFunc: getCompareValsFunc[float64](condition.op)}, nil + } + if v, ok := condition.val.String(); ok { + return &stringMatcher{value: v, evalFunc: getCompareValsFunc[string](condition.op)}, nil + } + if v, ok := condition.val.NillableString(); ok { + return &stringMatcher{value: v.Value(), evalFunc: getCompareValsFunc[string](condition.op)}, nil } case opIn, opNin: - inArr, ok := condition.val.([]any) - if !ok { - return nil, ErrInvalidInOperatorValue + inVals, err := client.ToArrayOfNormalValues(condition.val) + if err != nil { + return nil, err } - return newNinIndexCmp(inArr, condition.kind, condition.op == opIn) + return &indexInArrayMatcher{inValues: inVals, isIn: condition.op == opIn}, nil case opLike, opNlike, opILike, opNILike: - strVal, ok := condition.val.(string) + strVal, ok := condition.val.String() if !ok { - return nil, NewErrUnexpectedTypeValue[string](condition.val) + strOptVal, ok := condition.val.NillableString() + if !ok { + return nil, NewErrUnexpectedTypeValue[string](condition.val) + } + strVal = strOptVal.Value() } isLike := condition.op == opLike || condition.op == opILike isCaseInsensitive := condition.op == opILike || condition.op == opNILike @@ -668,14 +642,14 @@ func createValueMatchers(conditions []fieldFilterCond) ([]valueMatcher, error) { type fieldFilterCond struct { op string - val any + val client.NormalValue kind client.FieldKind } // determineFieldFilterConditions determines the conditions and their corresponding operation // for each indexed field. // It returns a slice of fieldFilterCond, where each element corresponds to a field in the index. 
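// Note that nil filter values are wrapped with client.NewNormalNil for the field's kind (see the body below), so matchers can treat nil uniformly through NormalValue.IsNil().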
-func (f *IndexFetcher) determineFieldFilterConditions() []fieldFilterCond { +func (f *IndexFetcher) determineFieldFilterConditions() ([]fieldFilterCond, error) { result := make([]fieldFilterCond, 0, len(f.indexedFields)) for i := range f.indexedFields { fieldInd := f.mapping.FirstIndexOfName(f.indexedFields[i].Name) @@ -692,9 +666,19 @@ func (f *IndexFetcher) determineFieldFilterConditions() []fieldFilterCond { condMap := indexFilterCond.(map[connor.FilterKey]any) for key, filterVal := range condMap { opKey := key.(*mapper.Operator) + var normalVal client.NormalValue + var err error + if filterVal == nil { + normalVal, err = client.NewNormalNil(f.indexedFields[i].Kind) + } else { + normalVal, err = client.NewNormalValue(filterVal) + } + if err != nil { + return nil, err + } result = append(result, fieldFilterCond{ op: opKey.Operation, - val: filterVal, + val: normalVal, kind: f.indexedFields[i].Kind, }) break @@ -702,10 +686,14 @@ func (f *IndexFetcher) determineFieldFilterConditions() []fieldFilterCond { break } if !found { - result = append(result, fieldFilterCond{op: opAny}) + result = append(result, fieldFilterCond{ + op: opAny, + val: client.NormalVoid{}, + kind: f.indexedFields[i].Kind, + }) } } - return result + return result, nil } // isUniqueFetchByFullKey checks if the only index key can be fetched by the full index key. @@ -719,11 +707,11 @@ func isUniqueFetchByFullKey(indexDesc *client.IndexDescription, conditions []fie res := indexDesc.Unique && len(conditions) == len(indexDesc.Fields) // first condition is not required to be _eq, but if is, val must be not nil - res = res && (conditions[0].op != opEq || conditions[0].val != nil) + res = res && (conditions[0].op != opEq || !conditions[0].val.IsNil()) // for the rest it must be _eq and val must be not nil for i := 1; i < len(conditions); i++ { - res = res && (conditions[i].op == opEq && conditions[i].val != nil) + res = res && (conditions[i].op == opEq && !conditions[i].val.IsNil()) } return res } diff --git a/db/fetcher/mocks/fetcher.go b/db/fetcher/mocks/fetcher.go index 044425c70b..4f537aefea 100644 --- a/db/fetcher/mocks/fetcher.go +++ b/db/fetcher/mocks/fetcher.go @@ -3,16 +3,21 @@ package mocks import ( - context "context" - + acp "github.com/sourcenetwork/defradb/acp" client "github.com/sourcenetwork/defradb/client" + context "context" + core "github.com/sourcenetwork/defradb/core" datastore "github.com/sourcenetwork/defradb/datastore" fetcher "github.com/sourcenetwork/defradb/db/fetcher" + identity "github.com/sourcenetwork/defradb/acp/identity" + + immutable "github.com/sourcenetwork/immutable" + mapper "github.com/sourcenetwork/defradb/planner/mapper" mock "github.com/stretchr/testify/mock" @@ -133,13 +138,13 @@ func (_c *Fetcher_FetchNext_Call) RunAndReturn(run func(context.Context) (fetche return _c } -// Init provides a mock function with given fields: ctx, txn, col, fields, filter, docmapper, reverse, showDeleted -func (_m *Fetcher) Init(ctx context.Context, txn datastore.Txn, col client.Collection, fields []client.FieldDefinition, filter *mapper.Filter, docmapper *core.DocumentMapping, reverse bool, showDeleted bool) error { - ret := _m.Called(ctx, txn, col, fields, filter, docmapper, reverse, showDeleted) +// Init provides a mock function with given fields: ctx, _a1, txn, _a3, col, fields, filter, docmapper, reverse, showDeleted +func (_m *Fetcher) Init(ctx context.Context, _a1 immutable.Option[identity.Identity], txn datastore.Txn, _a3 immutable.Option[acp.ACP], col client.Collection, fields 
[]client.FieldDefinition, filter *mapper.Filter, docmapper *core.DocumentMapping, reverse bool, showDeleted bool) error { + ret := _m.Called(ctx, _a1, txn, _a3, col, fields, filter, docmapper, reverse, showDeleted) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, datastore.Txn, client.Collection, []client.FieldDefinition, *mapper.Filter, *core.DocumentMapping, bool, bool) error); ok { - r0 = rf(ctx, txn, col, fields, filter, docmapper, reverse, showDeleted) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[identity.Identity], datastore.Txn, immutable.Option[acp.ACP], client.Collection, []client.FieldDefinition, *mapper.Filter, *core.DocumentMapping, bool, bool) error); ok { + r0 = rf(ctx, _a1, txn, _a3, col, fields, filter, docmapper, reverse, showDeleted) } else { r0 = ret.Error(0) } @@ -154,20 +159,22 @@ type Fetcher_Init_Call struct { // Init is a helper method to define mock.On call // - ctx context.Context +// - _a1 immutable.Option[identity.Identity] // - txn datastore.Txn +// - _a3 immutable.Option[acp.ACP] // - col client.Collection // - fields []client.FieldDefinition // - filter *mapper.Filter // - docmapper *core.DocumentMapping // - reverse bool // - showDeleted bool -func (_e *Fetcher_Expecter) Init(ctx interface{}, txn interface{}, col interface{}, fields interface{}, filter interface{}, docmapper interface{}, reverse interface{}, showDeleted interface{}) *Fetcher_Init_Call { - return &Fetcher_Init_Call{Call: _e.mock.On("Init", ctx, txn, col, fields, filter, docmapper, reverse, showDeleted)} +func (_e *Fetcher_Expecter) Init(ctx interface{}, _a1 interface{}, txn interface{}, _a3 interface{}, col interface{}, fields interface{}, filter interface{}, docmapper interface{}, reverse interface{}, showDeleted interface{}) *Fetcher_Init_Call { + return &Fetcher_Init_Call{Call: _e.mock.On("Init", ctx, _a1, txn, _a3, col, fields, filter, docmapper, reverse, showDeleted)} } -func (_c *Fetcher_Init_Call) Run(run func(ctx context.Context, txn datastore.Txn, col client.Collection, fields []client.FieldDefinition, filter *mapper.Filter, docmapper *core.DocumentMapping, reverse bool, showDeleted bool)) *Fetcher_Init_Call { +func (_c *Fetcher_Init_Call) Run(run func(ctx context.Context, _a1 immutable.Option[identity.Identity], txn datastore.Txn, _a3 immutable.Option[acp.ACP], col client.Collection, fields []client.FieldDefinition, filter *mapper.Filter, docmapper *core.DocumentMapping, reverse bool, showDeleted bool)) *Fetcher_Init_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(datastore.Txn), args[2].(client.Collection), args[3].([]client.FieldDefinition), args[4].(*mapper.Filter), args[5].(*core.DocumentMapping), args[6].(bool), args[7].(bool)) + run(args[0].(context.Context), args[1].(immutable.Option[identity.Identity]), args[2].(datastore.Txn), args[3].(immutable.Option[acp.ACP]), args[4].(client.Collection), args[5].([]client.FieldDefinition), args[6].(*mapper.Filter), args[7].(*core.DocumentMapping), args[8].(bool), args[9].(bool)) }) return _c } @@ -177,7 +184,7 @@ func (_c *Fetcher_Init_Call) Return(_a0 error) *Fetcher_Init_Call { return _c } -func (_c *Fetcher_Init_Call) RunAndReturn(run func(context.Context, datastore.Txn, client.Collection, []client.FieldDefinition, *mapper.Filter, *core.DocumentMapping, bool, bool) error) *Fetcher_Init_Call { +func (_c *Fetcher_Init_Call) RunAndReturn(run func(context.Context, immutable.Option[identity.Identity], datastore.Txn, immutable.Option[acp.ACP], client.Collection, 
[]client.FieldDefinition, *mapper.Filter, *core.DocumentMapping, bool, bool) error) *Fetcher_Init_Call { _c.Call.Return(run) return _c } diff --git a/db/fetcher/mocks/utils.go b/db/fetcher/mocks/utils.go index 298d5b2ad6..524c46fc9e 100644 --- a/db/fetcher/mocks/utils.go +++ b/db/fetcher/mocks/utils.go @@ -27,6 +27,8 @@ func NewStubbedFetcher(t *testing.T) *Fetcher { mock.Anything, mock.Anything, mock.Anything, + mock.Anything, + mock.Anything, ).Maybe().Return(nil) f.EXPECT().Start(mock.Anything, mock.Anything).Maybe().Return(nil) f.EXPECT().FetchNext(mock.Anything).Maybe().Return(nil, nil) diff --git a/db/fetcher/versioned.go b/db/fetcher/versioned.go index 3f05f2c29a..096002521c 100644 --- a/db/fetcher/versioned.go +++ b/db/fetcher/versioned.go @@ -19,6 +19,10 @@ import ( ds "github.com/ipfs/go-datastore" format "github.com/ipfs/go-ipld-format" + "github.com/sourcenetwork/immutable" + + "github.com/sourcenetwork/defradb/acp" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" @@ -91,6 +95,8 @@ type VersionedFetcher struct { queuedCids *list.List + acp immutable.Option[acp.ACP] + col client.Collection // @todo index *client.IndexDescription mCRDTs map[uint32]merklecrdt.MerkleCRDT @@ -99,7 +105,9 @@ type VersionedFetcher struct { // Init initializes the VersionedFetcher. func (vf *VersionedFetcher) Init( ctx context.Context, + identity immutable.Option[acpIdentity.Identity], txn datastore.Txn, + acp immutable.Option[acp.ACP], col client.Collection, fields []client.FieldDefinition, filter *mapper.Filter, @@ -107,6 +115,7 @@ func (vf *VersionedFetcher) Init( reverse bool, showDeleted bool, ) error { + vf.acp = acp vf.col = col vf.queuedCids = list.New() vf.mCRDTs = make(map[uint32]merklecrdt.MerkleCRDT) @@ -130,7 +139,18 @@ func (vf *VersionedFetcher) Init( // run the DF init, VersionedFetchers only supports the Primary (0) index vf.DocumentFetcher = new(DocumentFetcher) - return vf.DocumentFetcher.Init(ctx, vf.store, col, fields, filter, docmapper, reverse, showDeleted) + return vf.DocumentFetcher.Init( + ctx, + identity, + vf.store, + acp, + col, + fields, + filter, + docmapper, + reverse, + showDeleted, + ) } // Start serializes the correct state according to the Key and CID. 
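The versioned fetcher wiring above threads two new optional dependencies, an identity and an ACP module, through Init as immutable.Option values rather than nilable pointers. As a standalone illustration (not part of this patch), here is a minimal Go sketch of that option-threading pattern; Identity and ACP below are hypothetical stand-ins for acpIdentity.Identity and acp.ACP, and only the immutable.Some/None/HasValue/Value API that the diff itself relies on is assumed:

package main

import (
	"fmt"

	"github.com/sourcenetwork/immutable"
)

// Identity and ACP are hypothetical stand-ins for the real
// acpIdentity.Identity and acp.ACP types threaded through Init above.
type Identity struct{ DID string }

type ACP struct{ Name string }

// initFetcher mirrors the shape of the new Init parameters: both the
// identity and the ACP module are optional, and absence is stated
// explicitly with immutable.None instead of a nil pointer.
func initFetcher(identity immutable.Option[Identity], acpSystem immutable.Option[ACP]) {
	if !acpSystem.HasValue() {
		fmt.Println("no ACP configured: access is unrestricted")
		return
	}
	if identity.HasValue() {
		fmt.Println("ACP enabled, checking access for", identity.Value().DID)
		return
	}
	fmt.Println("ACP enabled, anonymous request")
}

func main() {
	initFetcher(immutable.None[Identity](), immutable.None[ACP]())
	initFetcher(immutable.Some(Identity{DID: "did:example:abc"}), immutable.Some(ACP{Name: "local"}))
}

The benefit over a nil pointer is that optionality is visible in the signature itself, which is also why the generated mocks above grow two extra mock.Anything arguments.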
diff --git a/db/index.go b/db/index.go index 319cdeb8a7..693a18a5bf 100644 --- a/db/index.go +++ b/db/index.go @@ -36,8 +36,12 @@ func canConvertIndexFieldValue[T any](val any) bool { } func getValidateIndexFieldFunc(kind client.FieldKind) func(any) bool { + if kind.IsObject() && !kind.IsArray() { + return canConvertIndexFieldValue[string] + } + switch kind { - case client.FieldKind_NILLABLE_STRING, client.FieldKind_FOREIGN_OBJECT: + case client.FieldKind_NILLABLE_STRING: return canConvertIndexFieldValue[string] case client.FieldKind_NILLABLE_INT: return canConvertIndexFieldValue[int64] @@ -112,18 +116,22 @@ type collectionBaseIndex struct { fieldsDescs []client.SchemaFieldDescription } -func (index *collectionBaseIndex) getDocFieldValues(doc *client.Document) ([]*client.FieldValue, error) { - result := make([]*client.FieldValue, 0, len(index.fieldsDescs)) +func (index *collectionBaseIndex) getDocFieldValues(doc *client.Document) ([]client.NormalValue, error) { + result := make([]client.NormalValue, 0, len(index.fieldsDescs)) for iter := range index.fieldsDescs { fieldVal, err := doc.TryGetValue(index.fieldsDescs[iter].Name) if err != nil { return nil, err } if fieldVal == nil || fieldVal.Value() == nil { - result = append(result, client.NewFieldValue(client.NONE_CRDT, nil)) + normalNil, err := client.NewNormalNil(index.fieldsDescs[iter].Kind) + if err != nil { + return nil, err + } + result = append(result, normalNil) continue } - result = append(result, fieldVal) + result = append(result, fieldVal.NormalValue()) } return result, nil } @@ -138,7 +146,7 @@ func (index *collectionBaseIndex) getDocumentsIndexKey( fields := make([]core.IndexedField, len(index.fieldsDescs)) for i := range index.fieldsDescs { - fields[i].Value = fieldValues[i].Value() + fields[i].Value = fieldValues[i] fields[i].Descending = index.desc.Fields[i].Descending } return core.NewIndexDataStoreKey(index.collection.ID(), index.desc.ID, fields), nil @@ -207,7 +215,7 @@ func (index *collectionSimpleIndex) getDocumentsIndexKey( return core.IndexDataStoreKey{}, err } - key.Fields = append(key.Fields, core.IndexedField{Value: doc.ID().String()}) + key.Fields = append(key.Fields, core.IndexedField{Value: client.NewNormalString(doc.ID().String())}) return key, nil } @@ -264,7 +272,7 @@ func (index *collectionSimpleIndex) deleteDocIndex( // hasIndexKeyNilField returns true if the index key has a field with nil value func hasIndexKeyNilField(key *core.IndexDataStoreKey) bool { for i := range key.Fields { - if key.Fields[i].Value == nil { + if key.Fields[i].Value.IsNil() { return true } } @@ -330,7 +338,7 @@ func (index *collectionUniqueIndex) getDocumentsIndexRecord( return core.IndexDataStoreKey{}, nil, err } if hasIndexKeyNilField(&key) { - key.Fields = append(key.Fields, core.IndexedField{Value: doc.ID().String()}) + key.Fields = append(key.Fields, core.IndexedField{Value: client.NewNormalString(doc.ID().String())}) return key, []byte{}, nil } else { return key, []byte(doc.ID().String()), nil @@ -373,6 +381,11 @@ func (index *collectionUniqueIndex) Update( oldDoc *client.Document, newDoc *client.Document, ) error { + // We only need to update the index if one of the indexed fields + // on the document has been changed. 
+ if !isUpdatingIndexedFields(index, oldDoc, newDoc) { + return nil + } newKey, newVal, err := index.prepareIndexRecordToStore(ctx, txn, newDoc) if err != nil { return err @@ -395,3 +408,25 @@ func (index *collectionUniqueIndex) deleteDocIndex( } return index.deleteIndexKey(ctx, txn, key) } + +func isUpdatingIndexedFields(index CollectionIndex, oldDoc, newDoc *client.Document) bool { + for _, indexedFields := range index.Description().Fields { + oldVal, getOldValErr := oldDoc.GetValue(indexedFields.Name) + newVal, getNewValErr := newDoc.GetValue(indexedFields.Name) + + // GetValue will return an error when the field doesn't exist. + // This will happen for oldDoc only if the field hasn't been set + // when first creating the document. For newDoc, this will happen + // only if the field hasn't been set when first creating the document + // AND the field hasn't been set on the update. + switch { + case getOldValErr != nil && getNewValErr != nil: + continue + case getOldValErr != nil && getNewValErr == nil: + return true + case oldVal.Value() != newVal.Value(): + return true + } + } + return false +} diff --git a/db/index_test.go b/db/index_test.go index 44c2e45f52..5409b6c20e 100644 --- a/db/index_test.go +++ b/db/index_test.go @@ -53,7 +53,7 @@ const ( type indexTestFixture struct { ctx context.Context - db *implicitTxnDB + db *db txn datastore.Txn users client.Collection t *testing.T @@ -219,7 +219,8 @@ func (f *indexTestFixture) createUserCollectionIndexOnAge() client.IndexDescript } func (f *indexTestFixture) dropIndex(colName, indexName string) error { - return f.db.dropCollectionIndex(f.ctx, f.txn, colName, indexName) + ctx := SetContextTxn(f.ctx, f.txn) + return f.db.dropCollectionIndex(ctx, colName, indexName) } func (f *indexTestFixture) countIndexPrefixes(indexName string) int { @@ -255,7 +256,8 @@ func (f *indexTestFixture) createCollectionIndexFor( collectionName string, desc client.IndexDescription, ) (client.IndexDescription, error) { - index, err := f.db.createCollectionIndex(f.ctx, f.txn, collectionName, desc) + ctx := SetContextTxn(f.ctx, f.txn) + index, err := f.db.createCollectionIndex(ctx, collectionName, desc) if err == nil { f.commitTxn() } @@ -263,11 +265,13 @@ func (f *indexTestFixture) createCollectionIndexFor( } func (f *indexTestFixture) getAllIndexes() (map[client.CollectionName][]client.IndexDescription, error) { - return f.db.getAllIndexDescriptions(f.ctx, f.txn) + ctx := SetContextTxn(f.ctx, f.txn) + return f.db.getAllIndexDescriptions(ctx) } func (f *indexTestFixture) getCollectionIndexes(colID uint32) ([]client.IndexDescription, error) { - return f.db.fetchCollectionIndexDescriptions(f.ctx, f.txn, colID) + ctx := SetContextTxn(f.ctx, f.txn) + return f.db.fetchCollectionIndexDescriptions(ctx, colID) } func TestCreateIndex_IfFieldsIsEmpty_ReturnError(t *testing.T) { @@ -784,7 +788,8 @@ func TestCollectionGetIndexes_ShouldCloseQueryIterator(t *testing.T) { mockedTxn.MockSystemstore.EXPECT().Query(mock.Anything, mock.Anything). 
Return(queryResults, nil) - _, err := f.users.WithTxn(mockedTxn).GetIndexes(f.ctx) + ctx := SetContextTxn(f.ctx, mockedTxn) + _, err := f.users.GetIndexes(ctx) assert.NoError(t, err) } @@ -840,7 +845,8 @@ func TestCollectionGetIndexes_IfSystemStoreFails_ReturnError(t *testing.T) { mockedTxn.EXPECT().Systemstore().Unset() mockedTxn.EXPECT().Systemstore().Return(mockedTxn.MockSystemstore).Maybe() - _, err := f.users.WithTxn(mockedTxn).GetIndexes(f.ctx) + ctx := SetContextTxn(f.ctx, mockedTxn) + _, err := f.users.GetIndexes(ctx) require.ErrorIs(t, err, testCase.ExpectedError) } } @@ -902,7 +908,8 @@ func TestCollectionGetIndexes_IfStoredIndexWithUnsupportedType_ReturnError(t *te mockedTxn.MockSystemstore.EXPECT().Query(mock.Anything, mock.Anything). Return(mocks.NewQueryResultsWithValues(t, indexDescData), nil) - _, err = collection.WithTxn(mockedTxn).GetIndexes(f.ctx) + ctx := SetContextTxn(f.ctx, mockedTxn) + _, err = collection.GetIndexes(ctx) require.ErrorIs(t, err, NewErrUnsupportedIndexFieldType(unsupportedKind)) } @@ -1093,17 +1100,18 @@ func TestDropIndex_IfFailsToDeleteFromStorage_ReturnError(t *testing.T) { mockedTxn.MockDatastore.EXPECT().Query(mock.Anything, mock.Anything).Maybe(). Return(mocks.NewQueryResultsWithValues(t), nil) - err := f.users.WithTxn(mockedTxn).DropIndex(f.ctx, testUsersColIndexName) + ctx := SetContextTxn(f.ctx, mockedTxn) + err := f.users.DropIndex(ctx, testUsersColIndexName) require.ErrorIs(t, err, testErr) } func TestDropIndex_ShouldUpdateCollectionsDescription(t *testing.T) { f := newIndexTestFixture(t) defer f.db.Close() - col := f.users.WithTxn(f.txn) - _, err := col.CreateIndex(f.ctx, getUsersIndexDescOnName()) + ctx := SetContextTxn(f.ctx, f.txn) + _, err := f.users.CreateIndex(ctx, getUsersIndexDescOnName()) require.NoError(t, err) - indOnAge, err := col.CreateIndex(f.ctx, getUsersIndexDescOnAge()) + indOnAge, err := f.users.CreateIndex(ctx, getUsersIndexDescOnAge()) require.NoError(t, err) f.commitTxn() @@ -1144,7 +1152,8 @@ func TestDropIndex_IfSystemStoreFails_ReturnError(t *testing.T) { mockedTxn.EXPECT().Systemstore().Unset() mockedTxn.EXPECT().Systemstore().Return(mockedTxn.MockSystemstore).Maybe() - err := f.users.WithTxn(mockedTxn).DropIndex(f.ctx, testUsersColIndexName) + ctx := SetContextTxn(f.ctx, mockedTxn) + err := f.users.DropIndex(ctx, testUsersColIndexName) require.ErrorIs(t, err, testErr) } @@ -1167,7 +1176,8 @@ func TestDropAllIndexes_ShouldDeleteAllIndexes(t *testing.T) { assert.Equal(t, 2, f.countIndexPrefixes("")) - err = f.users.(*collection).dropAllIndexes(f.ctx, f.txn) + ctx := SetContextTxn(f.ctx, f.txn) + err = f.users.(*collection).dropAllIndexes(ctx) assert.NoError(t, err) assert.Equal(t, 0, f.countIndexPrefixes("")) @@ -1179,7 +1189,8 @@ func TestDropAllIndexes_IfStorageFails_ReturnError(t *testing.T) { f.createUserCollectionIndexOnName() f.db.Close() - err := f.users.(*collection).dropAllIndexes(f.ctx, f.txn) + ctx := SetContextTxn(f.ctx, f.txn) + err := f.users.(*collection).dropAllIndexes(ctx) assert.Error(t, err) } @@ -1235,7 +1246,8 @@ func TestDropAllIndexes_IfSystemStorageFails_ReturnError(t *testing.T) { mockedTxn.EXPECT().Systemstore().Unset() mockedTxn.EXPECT().Systemstore().Return(mockedTxn.MockSystemstore).Maybe() - err := f.users.(*collection).dropAllIndexes(f.ctx, f.txn) + ctx := SetContextTxn(f.ctx, f.txn) + err := f.users.(*collection).dropAllIndexes(ctx) assert.ErrorIs(t, err, testErr, testCase.Name) } } @@ -1256,7 +1268,8 @@ func TestDropAllIndexes_ShouldCloseQueryIterator(t *testing.T) { 
mockedTxn.EXPECT().Systemstore().Unset() mockedTxn.EXPECT().Systemstore().Return(mockedTxn.MockSystemstore).Maybe() - _ = f.users.(*collection).dropAllIndexes(f.ctx, f.txn) + ctx := SetContextTxn(f.ctx, f.txn) + _ = f.users.(*collection).dropAllIndexes(ctx) } func TestNewCollectionIndex_IfDescriptionHasNoFields_ReturnError(t *testing.T) { diff --git a/db/indexed_docs_test.go b/db/indexed_docs_test.go index d10ad8eb5b..c3c1c6de7b 100644 --- a/db/indexed_docs_test.go +++ b/db/indexed_docs_test.go @@ -23,6 +23,8 @@ import ( "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" + "github.com/sourcenetwork/defradb/acp" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" @@ -58,7 +60,7 @@ func (f *indexTestFixture) newUserDoc(name string, age int, col client.Collectio data, err := json.Marshal(d) require.NoError(f.t, err) - doc, err := client.NewDocFromJSON(data, col.Schema()) + doc, err := client.NewDocFromJSON(data, col.Definition()) require.NoError(f.t, err) return doc } @@ -68,7 +70,7 @@ func (f *indexTestFixture) newProdDoc(id int, price float64, cat string, col cli data, err := json.Marshal(d) require.NoError(f.t, err) - doc, err := client.NewDocFromJSON(data, col.Schema()) + doc, err := client.NewDocFromJSON(data, col.Definition()) require.NoError(f.t, err) return doc } @@ -129,7 +131,8 @@ func (b *indexKeyBuilder) Build() core.IndexDataStoreKey { return key } - cols, err := b.f.db.getCollections(b.f.ctx, b.f.txn, client.CollectionFetchOptions{}) + ctx := SetContextTxn(b.f.ctx, b.f.txn) + cols, err := b.f.db.getCollections(ctx, client.CollectionFetchOptions{}) require.NoError(b.f.t, err) var collection client.Collection for _, col := range cols { @@ -166,15 +169,25 @@ indexLoop: hasNilValue := false for i, fieldName := range b.fieldsNames { fieldValue, err := b.doc.GetValue(fieldName) - var val any + var val client.NormalValue if err != nil { if !errors.Is(err, client.ErrFieldNotExist) { require.NoError(b.f.t, err) } - } else if fieldValue != nil { - val = fieldValue.Value() } - if val == nil { + if fieldValue != nil { + val = fieldValue.NormalValue() + } else { + kind := client.FieldKind_NILLABLE_STRING + if fieldName == usersAgeFieldName { + kind = client.FieldKind_NILLABLE_INT + } else if fieldName == usersWeightFieldName { + kind = client.FieldKind_NILLABLE_FLOAT + } + val, err = client.NewNormalNil(kind) + require.NoError(b.f.t, err) + } + if val.IsNil() { hasNilValue = true } descending := false @@ -185,7 +198,7 @@ indexLoop: } if !b.isUnique || hasNilValue { - key.Fields = append(key.Fields, core.IndexedField{Value: b.doc.ID().String()}) + key.Fields = append(key.Fields, core.IndexedField{Value: client.NewNormalString(b.doc.ID().String())}) } } @@ -310,7 +323,8 @@ func TestNonUnique_IfFailsToStoredIndexedDoc_Error(t *testing.T) { dataStoreOn.Put(mock.Anything, key.ToDS(), mock.Anything).Return(errors.New("error")) dataStoreOn.Put(mock.Anything, mock.Anything, mock.Anything).Return(nil) - err := f.users.WithTxn(mockTxn).Create(f.ctx, doc) + ctx := SetContextTxn(f.ctx, mockTxn) + err := f.users.Create(ctx, doc) require.ErrorIs(f.t, err, NewErrFailedToStoreIndexedField("name", nil)) } @@ -325,7 +339,7 @@ func TestNonUnique_IfDocDoesNotHaveIndexedField_SkipIndex(t *testing.T) { }{Age: 21, Weight: 154.1}) require.NoError(f.t, err) - doc, err := client.NewDocFromJSON(data, f.users.Schema()) + doc, err := client.NewDocFromJSON(data, 
f.users.Definition()) require.NoError(f.t, err) err = f.users.Create(f.ctx, doc) @@ -348,7 +362,8 @@ func TestNonUnique_IfSystemStorageHasInvalidIndexDescription_Error(t *testing.T) systemStoreOn.Query(mock.Anything, mock.Anything). Return(mocks.NewQueryResultsWithValues(t, []byte("invalid")), nil) - err := f.users.WithTxn(mockTxn).Create(f.ctx, doc) + ctx := SetContextTxn(f.ctx, mockTxn) + err := f.users.Create(ctx, doc) assert.ErrorIs(t, err, datastore.NewErrInvalidStoredValue(nil)) } @@ -366,7 +381,8 @@ func TestNonUnique_IfSystemStorageFailsToReadIndexDesc_Error(t *testing.T) { systemStoreOn.Query(mock.Anything, mock.Anything). Return(nil, testErr) - err := f.users.WithTxn(mockTxn).Create(f.ctx, doc) + ctx := SetContextTxn(f.ctx, mockTxn) + err := f.users.Create(ctx, doc) require.ErrorIs(t, err, testErr) } @@ -533,7 +549,7 @@ func TestNonUnique_IfIndexedFieldIsNil_StoreItAsNil(t *testing.T) { }{Age: 44}) require.NoError(f.t, err) - doc, err := client.NewDocFromJSON(docJSON, f.users.Schema()) + doc, err := client.NewDocFromJSON(docJSON, f.users.Definition()) require.NoError(f.t, err) f.saveDocToCollection(doc, f.users) @@ -578,8 +594,30 @@ func TestNonUniqueCreate_IfUponIndexingExistingDocsFetcherFails_ReturnError(t *t Name: "Fails to init", PrepareFetcher: func() fetcher.Fetcher { f := fetcherMocks.NewStubbedFetcher(t) - f.EXPECT().Init(mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Unset() - f.EXPECT().Init(mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(testError) + f.EXPECT().Init( + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + ).Unset() + f.EXPECT().Init( + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + ).Return(testError) f.EXPECT().Close().Unset() f.EXPECT().Close().Return(nil) return f @@ -756,7 +794,8 @@ func TestNonUniqueUpdate_IfFailsToReadIndexDescription_ReturnError(t *testing.T) require.NoError(t, err) // retrieve the collection without index cached - usersCol, err := f.db.getCollectionByName(f.ctx, f.txn, usersColName) + ctx := SetContextTxn(f.ctx, f.txn) + usersCol, err := f.db.getCollectionByName(ctx, usersColName) require.NoError(t, err) testErr := errors.New("test error") @@ -772,7 +811,8 @@ func TestNonUniqueUpdate_IfFailsToReadIndexDescription_ReturnError(t *testing.T) usersCol.(*collection).fetcherFactory = func() fetcher.Fetcher { return fetcherMocks.NewStubbedFetcher(t) } - err = usersCol.WithTxn(mockedTxn).Update(f.ctx, doc) + ctx = SetContextTxn(f.ctx, mockedTxn) + err = usersCol.Update(ctx, doc) require.ErrorIs(t, err, testErr) } @@ -787,8 +827,30 @@ func TestNonUniqueUpdate_IfFetcherFails_ReturnError(t *testing.T) { Name: "Fails to init", PrepareFetcher: func() fetcher.Fetcher { f := fetcherMocks.NewStubbedFetcher(t) - f.EXPECT().Init(mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Unset() - f.EXPECT().Init(mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(testError) + f.EXPECT().Init( + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + 
mock.Anything, + mock.Anything, + ).Unset() + f.EXPECT().Init( + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + ).Return(testError) f.EXPECT().Close().Unset() f.EXPECT().Close().Return(nil) return f @@ -886,11 +948,35 @@ func TestNonUniqueUpdate_ShouldPassToFetcherOnlyRelevantFields(t *testing.T) { f.users.(*collection).fetcherFactory = func() fetcher.Fetcher { f := fetcherMocks.NewStubbedFetcher(t) - f.EXPECT().Init(mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Unset() - f.EXPECT().Init(mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything). + f.EXPECT().Init( + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + ).Unset() + f.EXPECT().Init( + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + ). RunAndReturn(func( ctx context.Context, + identity immutable.Option[acpIdentity.Identity], txn datastore.Txn, + acp immutable.Option[acp.ACP], col client.Collection, fields []client.FieldDefinition, filter *mapper.Filter, @@ -968,7 +1054,8 @@ func TestNonUniqueUpdate_IfDatastoreFails_ReturnError(t *testing.T) { mockedTxn.EXPECT().Datastore().Unset() mockedTxn.EXPECT().Datastore().Return(mockedTxn.MockDatastore).Maybe() - err = f.users.WithTxn(mockedTxn).Update(f.ctx, doc) + ctx := SetContextTxn(f.ctx, mockedTxn) + err = f.users.Update(ctx, doc) require.ErrorIs(t, err, testErr) } } @@ -983,7 +1070,7 @@ func TestNonUpdate_IfIndexedFieldWasNil_ShouldDeleteIt(t *testing.T) { }{Age: 44}) require.NoError(f.t, err) - doc, err := client.NewDocFromJSON(docJSON, f.users.Schema()) + doc, err := client.NewDocFromJSON(docJSON, f.users.Definition()) require.NoError(f.t, err) f.saveDocToCollection(doc, f.users) @@ -1069,7 +1156,7 @@ func TestUnique_IfIndexedFieldIsNil_StoreItAsNil(t *testing.T) { }{Age: 44}) require.NoError(f.t, err) - doc, err := client.NewDocFromJSON(docJSON, f.users.Schema()) + doc, err := client.NewDocFromJSON(docJSON, f.users.Definition()) require.NoError(f.t, err) f.saveDocToCollection(doc, f.users) @@ -1183,7 +1270,7 @@ func TestComposite_IfIndexedFieldIsNil_StoreItAsNil(t *testing.T) { }{Age: 44}) require.NoError(f.t, err) - doc, err := client.NewDocFromJSON(docJSON, f.users.Schema()) + doc, err := client.NewDocFromJSON(docJSON, f.users.Definition()) require.NoError(f.t, err) f.saveDocToCollection(doc, f.users) @@ -1196,7 +1283,7 @@ func TestComposite_IfIndexedFieldIsNil_StoreItAsNil(t *testing.T) { assert.Len(t, data, 0) } -func TestComposite_IfNilUpdateToValue_ShouldUpdateIndexStored(t *testing.T) { +func TestUniqueComposite_IfNilUpdateToValue_ShouldUpdateIndexStored(t *testing.T) { testCases := []struct { Name string Doc string @@ -1238,34 +1325,36 @@ func TestComposite_IfNilUpdateToValue_ShouldUpdateIndexStored(t *testing.T) { } for _, tc := range testCases { - f := newIndexTestFixture(t) - defer f.db.Close() + t.Run(tc.Name, func(t *testing.T) { + f := newIndexTestFixture(t) + defer f.db.Close() - indexDesc := makeUnique(addFieldToIndex(getUsersIndexDescOnName(), usersAgeFieldName)) - _, err := f.createCollectionIndexFor(f.users.Name().Value(), indexDesc) - require.NoError(f.t, err) - 
f.commitTxn() + indexDesc := makeUnique(addFieldToIndex(getUsersIndexDescOnName(), usersAgeFieldName)) + _, err := f.createCollectionIndexFor(f.users.Name().Value(), indexDesc) + require.NoError(f.t, err) + f.commitTxn() - doc, err := client.NewDocFromJSON([]byte(tc.Doc), f.users.Schema()) - require.NoError(f.t, err) + doc, err := client.NewDocFromJSON([]byte(tc.Doc), f.users.Definition()) + require.NoError(f.t, err) - f.saveDocToCollection(doc, f.users) + f.saveDocToCollection(doc, f.users) - oldKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName, usersAgeFieldName). - Doc(doc).Unique().Build() + oldKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName, usersAgeFieldName). + Doc(doc).Unique().Build() - require.NoError(t, doc.SetWithJSON([]byte(tc.Update))) + require.NoError(t, doc.SetWithJSON([]byte(tc.Update))) - newKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName, usersAgeFieldName). - Doc(doc).Unique().Build() + newKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName, usersAgeFieldName). + Doc(doc).Unique().Build() - require.NoError(t, f.users.Update(f.ctx, doc), tc.Name) - f.commitTxn() + require.NoError(t, f.users.Update(f.ctx, doc), tc.Name) + f.commitTxn() - _, err = f.txn.Datastore().Get(f.ctx, oldKey.ToDS()) - require.Error(t, err, oldKey.ToString(), oldKey.ToDS(), tc.Name) - _, err = f.txn.Datastore().Get(f.ctx, newKey.ToDS()) - require.NoError(t, err, newKey.ToString(), newKey.ToDS(), tc.Name) + _, err = f.txn.Datastore().Get(f.ctx, oldKey.ToDS()) + require.Error(t, err, oldKey.ToString(), oldKey.ToDS(), tc.Name) + _, err = f.txn.Datastore().Get(f.ctx, newKey.ToDS()) + require.NoError(t, err, newKey.ToString(), newKey.ToDS(), tc.Name) + }) } } diff --git a/db/lens.go b/db/lens.go index d5240dad83..f21d084f88 100644 --- a/db/lens.go +++ b/db/lens.go @@ -18,12 +18,13 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" - "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/db/description" "github.com/sourcenetwork/defradb/errors" ) -func (db *db) setMigration(ctx context.Context, txn datastore.Txn, cfg client.LensConfig) error { +func (db *db) setMigration(ctx context.Context, cfg client.LensConfig) error { + txn := mustGetContextTxn(ctx) + dstCols, err := description.GetCollectionsBySchemaVersionID(ctx, txn, cfg.DestinationSchemaVersionID) if err != nil { return err @@ -34,7 +35,7 @@ func (db *db) setMigration(ctx context.Context, txn datastore.Txn, cfg client.Le return err } - colSeq, err := db.getSequence(ctx, txn, core.CollectionIDSequenceKey{}) + colSeq, err := db.getSequence(ctx, core.CollectionIDSequenceKey{}) if err != nil { return err } @@ -42,7 +43,7 @@ func (db *db) setMigration(ctx context.Context, txn datastore.Txn, cfg client.Le if len(sourceCols) == 0 { // If no collections are found with the given [SourceSchemaVersionID], this migration must be from // a collection/schema version that does not yet exist locally. We must now create it. - colID, err := colSeq.next(ctx, txn) + colID, err := colSeq.next(ctx) if err != nil { return err } @@ -86,7 +87,7 @@ func (db *db) setMigration(ctx context.Context, txn datastore.Txn, cfg client.Le if !isDstCollectionFound { // If the destination collection was not found, we must create it. This can happen when setting a migration // to a schema version that does not yet exist locally. 
-		colID, err := colSeq.next(ctx, txn)
+		colID, err := colSeq.next(ctx)
 		if err != nil {
 			return err
 		}
 	}
diff --git a/db/permission/check.go b/db/permission/check.go
new file mode 100644
index 0000000000..36dce10489
--- /dev/null
+++ b/db/permission/check.go
@@ -0,0 +1,92 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package permission
+
+import (
+	"context"
+
+	"github.com/sourcenetwork/immutable"
+
+	"github.com/sourcenetwork/defradb/acp"
+	acpIdentity "github.com/sourcenetwork/defradb/acp/identity"
+	"github.com/sourcenetwork/defradb/client"
+)
+
+// CheckAccessOfDocOnCollectionWithACP handles the check, which tells us if access to the target
+// document is valid, with respect to the permission type, and the specified collection.
+//
+// This function should only be called if acp is available, as we have unrestricted
+// access when acp is not available (acp turned off).
+//
+// Since we know acp is enabled, we have these components to check in this function:
+// (1) the request is permissioned (has an identity),
+// (2) the collection is permissioned (has a policy).
+//
+// Access to the document is unrestricted if:
+// - (2) is false.
+// - The document is public (unregistered); whether or not the request is signed does not matter.
func CheckAccessOfDocOnCollectionWithACP(
+	ctx context.Context,
+	identity immutable.Option[acpIdentity.Identity],
+	acpSystem acp.ACP,
+	collection client.Collection,
+	permission acp.DPIPermission,
+	docID string,
+) (bool, error) {
+	// Even if acp exists, if there is no policy on the collection (unpermissioned collection)
+	// then we still have unrestricted access.
+	policyID, resourceName, hasPolicy := isPermissioned(collection)
+	if !hasPolicy {
+		return true, nil
+	}
+
+	// Now that we know acp is available and the collection is permissioned, before checking access with
+	// acp directly we need to make sure that the document is not public, as public documents will not
+	// be registered with acp. We give unrestricted access to public documents, so it does not matter
+	// whether the request has an identity or not at this stage of the check.
+	isRegistered, err := acpSystem.IsDocRegistered(
+		ctx,
+		policyID,
+		resourceName,
+		docID,
+	)
+	if err != nil {
+		return false, err
+	}
+
+	if !isRegistered {
+		// Unrestricted access as it is a public document.
+		return true, nil
+	}
+
+	// At this point, if the request is not signed it has no access, because:
+	// the collection has a policy on it, acp is enabled/available,
+	// and the document is not public (it is registered with acp).
+	if !identity.HasValue() {
+		return false, nil
+	}
+
+	// Now actually check using the signature if this identity has access or not.
+	hasAccess, err := acpSystem.CheckDocAccess(
+		ctx,
+		permission,
+		identity.Value().String(),
+		policyID,
+		resourceName,
+		docID,
+	)
+
+	if err != nil {
+		return false, err
+	}
+
+	return hasAccess, nil
+}
diff --git a/db/permission/permission.go b/db/permission/permission.go
new file mode 100644
index 0000000000..3b365cba75
--- /dev/null
+++ b/db/permission/permission.go
@@ -0,0 +1,32 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package permission
+
+import (
+	"github.com/sourcenetwork/defradb/client"
+)
+
+// isPermissioned returns true if the collection has a policy, otherwise returns false.
+//
+// This tells us if access control is enabled for this collection or not.
+//
+// When there is a policy, in addition to returning true as the last return value, the
+// first return value is the policyID and the second is the resource name.
+func isPermissioned(collection client.Collection) (string, string, bool) {
+	policy := collection.Definition().Description.Policy
+	if policy.HasValue() &&
+		policy.Value().ID != "" &&
+		policy.Value().ResourceName != "" {
+		return policy.Value().ID, policy.Value().ResourceName, true
+	}
+
+	return "", "", false
+}
diff --git a/db/permission/register.go b/db/permission/register.go
new file mode 100644
index 0000000000..a46e5eef34
--- /dev/null
+++ b/db/permission/register.go
@@ -0,0 +1,51 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package permission
+
+import (
+	"context"
+
+	"github.com/sourcenetwork/immutable"
+
+	"github.com/sourcenetwork/defradb/acp"
+	acpIdentity "github.com/sourcenetwork/defradb/acp/identity"
+	"github.com/sourcenetwork/defradb/client"
+)
+
+// RegisterDocOnCollectionWithACP handles the registration of the document with acp.
+//
+// Since acp will always exist when this is called, we have these components to consider:
+// (1) the request is permissioned (has an identity signature),
+// (2) the collection is permissioned (has a policy).
+//
+// The document is only registered if both (1) and (2) are true.
+//
+// Otherwise, nothing is registered with acp.
+func RegisterDocOnCollectionWithACP(
+	ctx context.Context,
+	identity immutable.Option[acpIdentity.Identity],
+	acpSystem acp.ACP,
+	collection client.Collection,
+	docID string,
+) error {
+	// An identity exists and the collection has a policy.
+ if policyID, resourceName, hasPolicy := isPermissioned(collection); hasPolicy && identity.HasValue() { + return acpSystem.RegisterDocObject( + ctx, + identity.Value().String(), + policyID, + resourceName, + docID, + ) + } + + return nil +} diff --git a/db/request.go b/db/request.go index 69eabebd34..83a2fb09bb 100644 --- a/db/request.go +++ b/db/request.go @@ -14,12 +14,11 @@ import ( "context" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/planner" ) // execRequest executes a request against the database. -func (db *db) execRequest(ctx context.Context, request string, txn datastore.Txn) *client.RequestResult { +func (db *db) execRequest(ctx context.Context, request string) *client.RequestResult { res := &client.RequestResult{} ast, err := db.parser.BuildRequestAST(request) if err != nil { @@ -48,7 +47,15 @@ func (db *db) execRequest(ctx context.Context, request string, txn datastore.Txn return res } - planner := planner.New(ctx, db.WithTxn(txn), txn) + txn := mustGetContextTxn(ctx) + identity := GetContextIdentity(ctx) + planner := planner.New( + ctx, + identity, + db.acp, + db, + txn, + ) results, err := planner.RunRequest(ctx, parsedRequest) if err != nil { diff --git a/db/schema.go b/db/schema.go index a4582158f3..756c02f1ff 100644 --- a/db/schema.go +++ b/db/schema.go @@ -23,7 +23,6 @@ import ( "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/db/description" ) @@ -37,7 +36,6 @@ const ( // and creates the necessary collections, request types, etc. func (db *db) addSchema( ctx context.Context, - txn datastore.Txn, schemaString string, ) ([]client.CollectionDescription, error) { newDefinitions, err := db.parser.ParseSDL(ctx, schemaString) @@ -47,14 +45,20 @@ func (db *db) addSchema( returnDescriptions := make([]client.CollectionDescription, len(newDefinitions)) for i, definition := range newDefinitions { - col, err := db.createCollection(ctx, txn, definition) + // Only accept the schema if policy description is valid, otherwise reject the schema. + err := db.validateCollectionDefinitionPolicyDesc(ctx, definition.Description.Policy) + if err != nil { + return nil, err + } + + col, err := db.createCollection(ctx, definition, newDefinitions) if err != nil { return nil, err } returnDescriptions[i] = col.Description() } - err = db.loadSchema(ctx, txn) + err = db.loadSchema(ctx) if err != nil { return nil, err } @@ -62,8 +66,10 @@ func (db *db) addSchema( return returnDescriptions, nil } -func (db *db) loadSchema(ctx context.Context, txn datastore.Txn) error { - definitions, err := db.getAllActiveDefinitions(ctx, txn) +func (db *db) loadSchema(ctx context.Context) error { + txn := mustGetContextTxn(ctx) + + definitions, err := db.getAllActiveDefinitions(ctx) if err != nil { return err } @@ -84,11 +90,12 @@ func (db *db) loadSchema(ctx context.Context, txn datastore.Txn) error { // will be applied. 
func (db *db) patchSchema( ctx context.Context, - txn datastore.Txn, patchString string, migration immutable.Option[model.Lens], setAsDefaultVersion bool, ) error { + txn := mustGetContextTxn(ctx) + patch, err := jsonpatch.DecodePatch([]byte(patchString)) if err != nil { return err @@ -131,7 +138,6 @@ func (db *db) patchSchema( for _, schema := range newSchemaByName { err := db.updateSchema( ctx, - txn, existingSchemaByName, newSchemaByName, schema, @@ -143,7 +149,7 @@ func (db *db) patchSchema( } } - return db.loadSchema(ctx, txn) + return db.loadSchema(ctx) } // substituteSchemaPatch handles any substitution of values that may be required before @@ -170,10 +176,10 @@ func substituteSchemaPatch( return nil, err } - path = strings.TrimPrefix(path, "/") - splitPath := strings.Split(path, "/") - if value, hasValue := patchOperation["value"]; hasValue { + path = strings.TrimPrefix(path, "/") + splitPath := strings.Split(path, "/") + var newPatchValue immutable.Option[any] var field map[string]any isField := isField(splitPath) @@ -223,40 +229,6 @@ func substituteSchemaPatch( } } - if isField { - if kind, isString := field["Kind"].(string); isString { - substitute, schemaName, err := getSubstituteFieldKind(kind, schemaByName) - if err != nil { - return nil, err - } - - field["Kind"] = substitute - if schemaName != "" { - if field["Schema"] != nil && field["Schema"] != schemaName { - return nil, NewErrFieldKindDoesNotMatchFieldSchema(kind, field["Schema"].(string)) - } - field["Schema"] = schemaName - } - - newPatchValue = immutable.Some[any](field) - } - } else if isFieldKind(splitPath) { - var kind any - err = json.Unmarshal(*value, &kind) - if err != nil { - return nil, err - } - - if kind, isString := kind.(string); isString { - substitute, _, err := getSubstituteFieldKind(kind, schemaByName) - if err != nil { - return nil, err - } - - newPatchValue = immutable.Some[any](substitute) - } - } - if newPatchValue.HasValue() { substitute, err := json.Marshal(newPatchValue.Value()) if err != nil { @@ -274,10 +246,9 @@ func substituteSchemaPatch( func (db *db) getSchemaByVersionID( ctx context.Context, - txn datastore.Txn, versionID string, ) (client.SchemaDescription, error) { - schemas, err := db.getSchemas(ctx, txn, client.SchemaFetchOptions{ID: immutable.Some(versionID)}) + schemas, err := db.getSchemas(ctx, client.SchemaFetchOptions{ID: immutable.Some(versionID)}) if err != nil { return client.SchemaDescription{}, err } @@ -288,9 +259,10 @@ func (db *db) getSchemaByVersionID( func (db *db) getSchemas( ctx context.Context, - txn datastore.Txn, options client.SchemaFetchOptions, ) ([]client.SchemaDescription, error) { + txn := mustGetContextTxn(ctx) + schemas := []client.SchemaDescription{} switch { @@ -331,36 +303,6 @@ func (db *db) getSchemas( return result, nil } -// getSubstituteFieldKind checks and attempts to get the underlying integer value for the given string -// Field Kind value. It will return the value if one is found, else returns an [ErrFieldKindNotFound]. -// -// If the value represents a foreign relation the collection name will also be returned. 
-func getSubstituteFieldKind( - kind string, - schemaByName map[string]client.SchemaDescription, -) (client.FieldKind, string, error) { - substitute, substituteFound := client.FieldKindStringToEnumMapping[kind] - if substituteFound { - return substitute, "", nil - } else { - var collectionName string - var substitute client.FieldKind - if len(kind) > 0 && kind[0] == '[' && kind[len(kind)-1] == ']' { - collectionName = kind[1 : len(kind)-1] - substitute = client.FieldKind_FOREIGN_OBJECT_ARRAY - } else { - collectionName = kind - substitute = client.FieldKind_FOREIGN_OBJECT - } - - if _, substituteFound := schemaByName[collectionName]; substituteFound { - return substitute, collectionName, nil - } - - return 0, "", NewErrFieldKindNotFound(kind) - } -} - // isFieldOrInner returns true if the given path points to a SchemaFieldDescription or a property within it. func isFieldOrInner(path []string) bool { //nolint:goconst @@ -372,13 +314,6 @@ func isField(path []string) bool { return len(path) == 3 && path[fieldsPathIndex] == "Fields" } -// isField returns true if the given path points to a SchemaFieldDescription.Kind property. -func isFieldKind(path []string) bool { - return len(path) == 4 && - path[fieldIndexPathIndex+1] == "Kind" && - path[fieldsPathIndex] == "Fields" -} - // containsLetter returns true if the string contains a single unicode character. func containsLetter(s string) bool { for _, r := range s { diff --git a/db/sequence.go b/db/sequence.go index 3c510ec78c..f39bdcfb65 100644 --- a/db/sequence.go +++ b/db/sequence.go @@ -17,7 +17,6 @@ import ( ds "github.com/ipfs/go-datastore" "github.com/sourcenetwork/defradb/core" - "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/errors" ) @@ -26,15 +25,15 @@ type sequence struct { val uint64 } -func (db *db) getSequence(ctx context.Context, txn datastore.Txn, key core.Key) (*sequence, error) { +func (db *db) getSequence(ctx context.Context, key core.Key) (*sequence, error) { seq := &sequence{ key: key, val: uint64(0), } - _, err := seq.get(ctx, txn) + _, err := seq.get(ctx) if errors.Is(err, ds.ErrNotFound) { - err = seq.update(ctx, txn) + err = seq.update(ctx) if err != nil { return nil, err } @@ -45,7 +44,9 @@ func (db *db) getSequence(ctx context.Context, txn datastore.Txn, key core.Key) return seq, nil } -func (seq *sequence) get(ctx context.Context, txn datastore.Txn) (uint64, error) { +func (seq *sequence) get(ctx context.Context) (uint64, error) { + txn := mustGetContextTxn(ctx) + val, err := txn.Systemstore().Get(ctx, seq.key.ToDS()) if err != nil { return 0, err @@ -55,7 +56,9 @@ func (seq *sequence) get(ctx context.Context, txn datastore.Txn) (uint64, error) return seq.val, nil } -func (seq *sequence) update(ctx context.Context, txn datastore.Txn) error { +func (seq *sequence) update(ctx context.Context) error { + txn := mustGetContextTxn(ctx) + var buf [8]byte binary.BigEndian.PutUint64(buf[:], seq.val) if err := txn.Systemstore().Put(ctx, seq.key.ToDS(), buf[:]); err != nil { @@ -65,12 +68,12 @@ func (seq *sequence) update(ctx context.Context, txn datastore.Txn) error { return nil } -func (seq *sequence) next(ctx context.Context, txn datastore.Txn) (uint64, error) { - _, err := seq.get(ctx, txn) +func (seq *sequence) next(ctx context.Context) (uint64, error) { + _, err := seq.get(ctx) if err != nil { return 0, err } seq.val++ - return seq.val, seq.update(ctx, txn) + return seq.val, seq.update(ctx) } diff --git a/db/store.go b/db/store.go new file mode 100644 index 0000000000..1686b9af3e --- 
/dev/null +++ b/db/store.go @@ -0,0 +1,271 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package db + +import ( + "context" + + "github.com/lens-vm/lens/host-go/config/model" + + "github.com/sourcenetwork/immutable" + + "github.com/sourcenetwork/defradb/client" +) + +// ExecRequest executes a request against the database. +func (db *db) ExecRequest(ctx context.Context, request string) *client.RequestResult { + ctx, txn, err := ensureContextTxn(ctx, db, false) + if err != nil { + res := &client.RequestResult{} + res.GQL.Errors = []error{err} + return res + } + defer txn.Discard(ctx) + + res := db.execRequest(ctx, request) + if len(res.GQL.Errors) > 0 { + return res + } + + if err := txn.Commit(ctx); err != nil { + res.GQL.Errors = []error{err} + return res + } + + return res +} + +// GetCollectionByName returns an existing collection within the database. +func (db *db) GetCollectionByName(ctx context.Context, name string) (client.Collection, error) { + ctx, txn, err := ensureContextTxn(ctx, db, true) + if err != nil { + return nil, err + } + defer txn.Discard(ctx) + + return db.getCollectionByName(ctx, name) +} + +// GetCollections gets all the currently defined collections. +func (db *db) GetCollections( + ctx context.Context, + options client.CollectionFetchOptions, +) ([]client.Collection, error) { + ctx, txn, err := ensureContextTxn(ctx, db, true) + if err != nil { + return nil, err + } + defer txn.Discard(ctx) + + return db.getCollections(ctx, options) +} + +// GetSchemaByVersionID returns the schema description for the schema version of the +// ID provided. +// +// Will return an error if it is not found. +func (db *db) GetSchemaByVersionID(ctx context.Context, versionID string) (client.SchemaDescription, error) { + ctx, txn, err := ensureContextTxn(ctx, db, true) + if err != nil { + return client.SchemaDescription{}, err + } + defer txn.Discard(ctx) + + return db.getSchemaByVersionID(ctx, versionID) +} + +// GetSchemas returns all schema versions that currently exist within +// this [Store]. +func (db *db) GetSchemas( + ctx context.Context, + options client.SchemaFetchOptions, +) ([]client.SchemaDescription, error) { + ctx, txn, err := ensureContextTxn(ctx, db, true) + if err != nil { + return nil, err + } + defer txn.Discard(ctx) + + return db.getSchemas(ctx, options) +} + +// GetAllIndexes gets all the indexes in the database. +func (db *db) GetAllIndexes( + ctx context.Context, +) (map[client.CollectionName][]client.IndexDescription, error) { + ctx, txn, err := ensureContextTxn(ctx, db, true) + if err != nil { + return nil, err + } + defer txn.Discard(ctx) + + return db.getAllIndexDescriptions(ctx) +} + +// AddSchema takes the provided GQL schema in SDL format, and applies it to the database, +// creating the necessary collections, request types, etc. +// +// All schema types provided must not exist prior to calling this, and they may not reference existing +// types previously defined. 
+func (db *db) AddSchema(ctx context.Context, schemaString string) ([]client.CollectionDescription, error) { + ctx, txn, err := ensureContextTxn(ctx, db, false) + if err != nil { + return nil, err + } + defer txn.Discard(ctx) + + cols, err := db.addSchema(ctx, schemaString) + if err != nil { + return nil, err + } + + if err := txn.Commit(ctx); err != nil { + return nil, err + } + return cols, nil +} + +// PatchSchema takes the given JSON patch string and applies it to the set of CollectionDescriptions +// present in the database. +// +// It will also update the GQL types used by the query system. It will error and not apply any of the +// requested, valid updates should the net result of the patch result in an invalid state. The +// individual operations defined in the patch do not need to result in a valid state, only the net result +// of the full patch. +// +// The collections (including the schema version ID) will only be updated if any changes have actually +// been made, if the net result of the patch matches the current persisted description then no changes +// will be applied. +func (db *db) PatchSchema( + ctx context.Context, + patchString string, + migration immutable.Option[model.Lens], + setAsDefaultVersion bool, +) error { + ctx, txn, err := ensureContextTxn(ctx, db, false) + if err != nil { + return err + } + defer txn.Discard(ctx) + + err = db.patchSchema(ctx, patchString, migration, setAsDefaultVersion) + if err != nil { + return err + } + + return txn.Commit(ctx) +} + +func (db *db) PatchCollection( + ctx context.Context, + patchString string, +) error { + ctx, txn, err := ensureContextTxn(ctx, db, false) + if err != nil { + return err + } + defer txn.Discard(ctx) + + err = db.patchCollection(ctx, patchString) + if err != nil { + return err + } + + return txn.Commit(ctx) +} + +func (db *db) SetActiveSchemaVersion(ctx context.Context, schemaVersionID string) error { + ctx, txn, err := ensureContextTxn(ctx, db, false) + if err != nil { + return err + } + defer txn.Discard(ctx) + + err = db.setActiveSchemaVersion(ctx, schemaVersionID) + if err != nil { + return err + } + + return txn.Commit(ctx) +} + +func (db *db) SetMigration(ctx context.Context, cfg client.LensConfig) error { + ctx, txn, err := ensureContextTxn(ctx, db, false) + if err != nil { + return err + } + defer txn.Discard(ctx) + + err = db.setMigration(ctx, cfg) + if err != nil { + return err + } + + return txn.Commit(ctx) +} + +func (db *db) AddView( + ctx context.Context, + query string, + sdl string, + transform immutable.Option[model.Lens], +) ([]client.CollectionDefinition, error) { + ctx, txn, err := ensureContextTxn(ctx, db, false) + if err != nil { + return nil, err + } + defer txn.Discard(ctx) + + defs, err := db.addView(ctx, query, sdl, transform) + if err != nil { + return nil, err + } + + err = txn.Commit(ctx) + if err != nil { + return nil, err + } + + return defs, nil +} + +// BasicImport imports a json dataset. +// filepath must be accessible to the node. +func (db *db) BasicImport(ctx context.Context, filepath string) error { + ctx, txn, err := ensureContextTxn(ctx, db, false) + if err != nil { + return err + } + defer txn.Discard(ctx) + + err = db.basicImport(ctx, filepath) + if err != nil { + return err + } + + return txn.Commit(ctx) +} + +// BasicExport exports the current data or subset of data to file in json format. 
+func (db *db) BasicExport(ctx context.Context, config *client.BackupConfig) error { + ctx, txn, err := ensureContextTxn(ctx, db, true) + if err != nil { + return err + } + defer txn.Discard(ctx) + + err = db.basicExport(ctx, config) + if err != nil { + return err + } + + return txn.Commit(ctx) +} diff --git a/db/subscriptions.go b/db/subscriptions.go index 2e7d2d4123..0d16074887 100644 --- a/db/subscriptions.go +++ b/db/subscriptions.go @@ -15,7 +15,6 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" - "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/events" "github.com/sourcenetwork/defradb/planner" ) @@ -55,24 +54,31 @@ func (db *db) handleSubscription( for evt := range pub.Event() { txn, err := db.NewTxn(ctx, false) if err != nil { - log.Error(ctx, err.Error()) + log.ErrorContext(ctx, err.Error()) continue } - db.handleEvent(ctx, txn, pub, evt, r) - + ctx := SetContextTxn(ctx, txn) + db.handleEvent(ctx, pub, evt, r) txn.Discard(ctx) } } func (db *db) handleEvent( ctx context.Context, - txn datastore.Txn, pub *events.Publisher[events.Update], evt events.Update, r *request.ObjectSubscription, ) { - p := planner.New(ctx, db.WithTxn(txn), txn) + txn := mustGetContextTxn(ctx) + identity := GetContextIdentity(ctx) + p := planner.New( + ctx, + identity, + db.acp, + db, + txn, + ) s := r.ToSelect(evt.DocID, evt.Cid.String()) diff --git a/db/txn_db.go b/db/txn_db.go deleted file mode 100644 index f2fbe7cea3..0000000000 --- a/db/txn_db.go +++ /dev/null @@ -1,391 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package db - -import ( - "context" - - "github.com/lens-vm/lens/host-go/config/model" - "github.com/sourcenetwork/immutable" - - "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/datastore" -) - -var _ client.DB = (*implicitTxnDB)(nil) -var _ client.DB = (*explicitTxnDB)(nil) -var _ client.Store = (*implicitTxnDB)(nil) -var _ client.Store = (*explicitTxnDB)(nil) - -type implicitTxnDB struct { - *db -} - -type explicitTxnDB struct { - *db - txn datastore.Txn - lensRegistry client.LensRegistry -} - -// ExecRequest executes a request against the database. -func (db *implicitTxnDB) ExecRequest(ctx context.Context, request string) *client.RequestResult { - txn, err := db.NewTxn(ctx, false) - if err != nil { - res := &client.RequestResult{} - res.GQL.Errors = []error{err} - return res - } - defer txn.Discard(ctx) - - res := db.execRequest(ctx, request, txn) - if len(res.GQL.Errors) > 0 { - return res - } - - if err := txn.Commit(ctx); err != nil { - res.GQL.Errors = []error{err} - return res - } - - return res -} - -// ExecRequest executes a transaction request against the database. -func (db *explicitTxnDB) ExecRequest( - ctx context.Context, - request string, -) *client.RequestResult { - return db.execRequest(ctx, request, db.txn) -} - -// GetCollectionByName returns an existing collection within the database. 
-func (db *implicitTxnDB) GetCollectionByName(ctx context.Context, name string) (client.Collection, error) { - txn, err := db.NewTxn(ctx, true) - if err != nil { - return nil, err - } - defer txn.Discard(ctx) - - return db.getCollectionByName(ctx, txn, name) -} - -// GetCollectionByName returns an existing collection within the database. -func (db *explicitTxnDB) GetCollectionByName(ctx context.Context, name string) (client.Collection, error) { - col, err := db.getCollectionByName(ctx, db.txn, name) - if err != nil { - return nil, err - } - - return col.WithTxn(db.txn), nil -} - -// GetCollections gets all the currently defined collections. -func (db *implicitTxnDB) GetCollections( - ctx context.Context, - options client.CollectionFetchOptions, -) ([]client.Collection, error) { - txn, err := db.NewTxn(ctx, true) - if err != nil { - return nil, err - } - defer txn.Discard(ctx) - - return db.getCollections(ctx, txn, options) -} - -// GetCollections gets all the currently defined collections. -func (db *explicitTxnDB) GetCollections( - ctx context.Context, - options client.CollectionFetchOptions, -) ([]client.Collection, error) { - cols, err := db.getCollections(ctx, db.txn, options) - if err != nil { - return nil, err - } - - for i := range cols { - cols[i] = cols[i].WithTxn(db.txn) - } - - return cols, nil -} - -// GetSchemaByVersionID returns the schema description for the schema version of the -// ID provided. -// -// Will return an error if it is not found. -func (db *implicitTxnDB) GetSchemaByVersionID(ctx context.Context, versionID string) (client.SchemaDescription, error) { - txn, err := db.NewTxn(ctx, true) - if err != nil { - return client.SchemaDescription{}, err - } - defer txn.Discard(ctx) - - return db.getSchemaByVersionID(ctx, txn, versionID) -} - -// GetSchemaByVersionID returns the schema description for the schema version of the -// ID provided. -// -// Will return an error if it is not found. -func (db *explicitTxnDB) GetSchemaByVersionID(ctx context.Context, versionID string) (client.SchemaDescription, error) { - return db.getSchemaByVersionID(ctx, db.txn, versionID) -} - -// GetSchemas returns all schema versions that currently exist within -// this [Store]. -func (db *implicitTxnDB) GetSchemas( - ctx context.Context, - options client.SchemaFetchOptions, -) ([]client.SchemaDescription, error) { - txn, err := db.NewTxn(ctx, true) - if err != nil { - return nil, err - } - defer txn.Discard(ctx) - - return db.getSchemas(ctx, txn, options) -} - -// GetSchemas returns all schema versions that currently exist within -// this [Store]. -func (db *explicitTxnDB) GetSchemas( - ctx context.Context, - options client.SchemaFetchOptions, -) ([]client.SchemaDescription, error) { - return db.getSchemas(ctx, db.txn, options) -} - -// GetAllIndexes gets all the indexes in the database. -func (db *implicitTxnDB) GetAllIndexes( - ctx context.Context, -) (map[client.CollectionName][]client.IndexDescription, error) { - txn, err := db.NewTxn(ctx, true) - if err != nil { - return nil, err - } - defer txn.Discard(ctx) - - return db.getAllIndexDescriptions(ctx, txn) -} - -// GetAllIndexes gets all the indexes in the database. -func (db *explicitTxnDB) GetAllIndexes( - ctx context.Context, -) (map[client.CollectionName][]client.IndexDescription, error) { - return db.getAllIndexDescriptions(ctx, db.txn) -} - -// AddSchema takes the provided GQL schema in SDL format, and applies it to the database, -// creating the necessary collections, request types, etc. 
-// -// All schema types provided must not exist prior to calling this, and they may not reference existing -// types previously defined. -func (db *implicitTxnDB) AddSchema(ctx context.Context, schemaString string) ([]client.CollectionDescription, error) { - txn, err := db.NewTxn(ctx, false) - if err != nil { - return nil, err - } - defer txn.Discard(ctx) - - cols, err := db.addSchema(ctx, txn, schemaString) - if err != nil { - return nil, err - } - - if err := txn.Commit(ctx); err != nil { - return nil, err - } - return cols, nil -} - -// AddSchema takes the provided GQL schema in SDL format, and applies it to the database, -// creating the necessary collections, request types, etc. -// -// All schema types provided must not exist prior to calling this, and they may not reference existing -// types previously defined. -func (db *explicitTxnDB) AddSchema(ctx context.Context, schemaString string) ([]client.CollectionDescription, error) { - return db.addSchema(ctx, db.txn, schemaString) -} - -// PatchSchema takes the given JSON patch string and applies it to the set of CollectionDescriptions -// present in the database. -// -// It will also update the GQL types used by the query system. It will error and not apply any of the -// requested, valid updates should the net result of the patch result in an invalid state. The -// individual operations defined in the patch do not need to result in a valid state, only the net result -// of the full patch. -// -// The collections (including the schema version ID) will only be updated if any changes have actually -// been made, if the net result of the patch matches the current persisted description then no changes -// will be applied. -func (db *implicitTxnDB) PatchSchema( - ctx context.Context, - patchString string, - migration immutable.Option[model.Lens], - setAsDefaultVersion bool, -) error { - txn, err := db.NewTxn(ctx, false) - if err != nil { - return err - } - defer txn.Discard(ctx) - - err = db.patchSchema(ctx, txn, patchString, migration, setAsDefaultVersion) - if err != nil { - return err - } - - return txn.Commit(ctx) -} - -// PatchSchema takes the given JSON patch string and applies it to the set of CollectionDescriptions -// present in the database. -// -// It will also update the GQL types used by the query system. It will error and not apply any of the -// requested, valid updates should the net result of the patch result in an invalid state. The -// individual operations defined in the patch do not need to result in a valid state, only the net result -// of the full patch. -// -// The collections (including the schema version ID) will only be updated if any changes have actually -// been made, if the net result of the patch matches the current persisted description then no changes -// will be applied. 
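// Every implicitTxnDB method in this (deleted) file repeats the same
// open / defer-Discard / work / Commit sequence, while the explicitTxnDB
// variant simply forwards db.txn. Below is a minimal sketch of that idiom,
// reusing the illustrative Txn interface from the sketch above; it assumes
// Badger-style semantics, where Discard after a successful Commit is a no-op.

func withImplicitTxn(
	ctx context.Context,
	newTxn func(ctx context.Context, readOnly bool) (Txn, error),
	readOnly bool,
	do func(ctx context.Context, txn Txn) error,
) error {
	txn, err := newTxn(ctx, readOnly)
	if err != nil {
		return err
	}
	// Runs on every return path; assumed a no-op once Commit has succeeded.
	defer txn.Discard(ctx)

	if err := do(ctx, txn); err != nil {
		return err
	}
	return txn.Commit(ctx)
}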
-func (db *explicitTxnDB) PatchSchema( - ctx context.Context, - patchString string, - migration immutable.Option[model.Lens], - setAsDefaultVersion bool, -) error { - return db.patchSchema(ctx, db.txn, patchString, migration, setAsDefaultVersion) -} - -func (db *implicitTxnDB) SetActiveSchemaVersion(ctx context.Context, schemaVersionID string) error { - txn, err := db.NewTxn(ctx, false) - if err != nil { - return err - } - defer txn.Discard(ctx) - - err = db.setActiveSchemaVersion(ctx, txn, schemaVersionID) - if err != nil { - return err - } - - return txn.Commit(ctx) -} - -func (db *explicitTxnDB) SetActiveSchemaVersion(ctx context.Context, schemaVersionID string) error { - return db.setActiveSchemaVersion(ctx, db.txn, schemaVersionID) -} - -func (db *implicitTxnDB) SetMigration(ctx context.Context, cfg client.LensConfig) error { - txn, err := db.NewTxn(ctx, false) - if err != nil { - return err - } - defer txn.Discard(ctx) - - err = db.setMigration(ctx, txn, cfg) - if err != nil { - return err - } - - return txn.Commit(ctx) -} - -func (db *explicitTxnDB) SetMigration(ctx context.Context, cfg client.LensConfig) error { - return db.setMigration(ctx, db.txn, cfg) -} - -func (db *implicitTxnDB) AddView( - ctx context.Context, - query string, - sdl string, - transform immutable.Option[model.Lens], -) ([]client.CollectionDefinition, error) { - txn, err := db.NewTxn(ctx, false) - if err != nil { - return nil, err - } - defer txn.Discard(ctx) - - defs, err := db.addView(ctx, txn, query, sdl, transform) - if err != nil { - return nil, err - } - - err = txn.Commit(ctx) - if err != nil { - return nil, err - } - - return defs, nil -} - -func (db *explicitTxnDB) AddView( - ctx context.Context, - query string, - sdl string, - transform immutable.Option[model.Lens], -) ([]client.CollectionDefinition, error) { - return db.addView(ctx, db.txn, query, sdl, transform) -} - -// BasicImport imports a json dataset. -// filepath must be accessible to the node. -func (db *implicitTxnDB) BasicImport(ctx context.Context, filepath string) error { - txn, err := db.NewTxn(ctx, false) - if err != nil { - return err - } - defer txn.Discard(ctx) - - err = db.basicImport(ctx, txn, filepath) - if err != nil { - return err - } - - return txn.Commit(ctx) -} - -// BasicImport imports a json dataset. -// filepath must be accessible to the node. -func (db *explicitTxnDB) BasicImport(ctx context.Context, filepath string) error { - return db.basicImport(ctx, db.txn, filepath) -} - -// BasicExport exports the current data or subset of data to file in json format. -func (db *implicitTxnDB) BasicExport(ctx context.Context, config *client.BackupConfig) error { - txn, err := db.NewTxn(ctx, true) - if err != nil { - return err - } - defer txn.Discard(ctx) - - err = db.basicExport(ctx, txn, config) - if err != nil { - return err - } - - return txn.Commit(ctx) -} - -// BasicExport exports the current data or subset of data to file in json format. -func (db *explicitTxnDB) BasicExport(ctx context.Context, config *client.BackupConfig) error { - return db.basicExport(ctx, db.txn, config) -} - -// LensRegistry returns the LensRegistry in use by this database instance. -// -// It exposes several useful thread-safe migration related functions. 
-func (db *explicitTxnDB) LensRegistry() client.LensRegistry { - return db.lensRegistry -} diff --git a/db/view.go b/db/view.go index ea57f94541..7cf040cbc5 100644 --- a/db/view.go +++ b/db/view.go @@ -20,17 +20,17 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" - "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/db/description" ) func (db *db) addView( ctx context.Context, - txn datastore.Txn, inputQuery string, sdl string, transform immutable.Option[model.Lens], ) ([]client.CollectionDefinition, error) { + txn := mustGetContextTxn(ctx) + // Wrap the given query as part of the GQL query object - this simplifies the syntax for users // and ensures that we can't be given mutations. In the future this line should disappear along // with the all calls to the parser appart from `ParseSDL` when we implement the DQL stuff. @@ -80,7 +80,7 @@ func (db *db) addView( Schema: schema, } } else { - col, err := db.createCollection(ctx, txn, definition) + col, err := db.createCollection(ctx, definition, newDefinitions) if err != nil { return nil, err } @@ -97,7 +97,7 @@ func (db *db) addView( } } - err = db.loadSchema(ctx, txn) + err = db.loadSchema(ctx) if err != nil { return nil, err } diff --git a/docs/cli/defradb.md b/docs/cli/defradb.md index c89ce0f1aa..602206e575 100644 --- a/docs/cli/defradb.md +++ b/docs/cli/defradb.md @@ -14,11 +14,13 @@ Start a DefraDB node, interact with a local or remote node, and much more. ``` --allowed-origins stringArray List of origins to allow for CORS requests -h, --help help for defradb - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client.md b/docs/cli/defradb_client.md index 30e8c804ee..302e171dd3 100644 --- a/docs/cli/defradb_client.md +++ b/docs/cli/defradb_client.md @@ -10,19 +10,22 @@ Execute queries, add schema types, obtain node info, etc. ### Options ``` - -h, --help help for client - --tx uint Transaction ID + -h, --help help for client + -i, --identity string ACP Identity + --tx uint Transaction ID ``` ### Options inherited from parent commands ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. 
Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) @@ -38,6 +41,7 @@ Execute queries, add schema types, obtain node info, etc. ### SEE ALSO * [defradb](defradb.md) - DefraDB Edge Database +* [defradb client acp](defradb_client_acp.md) - Interact with the access control system of a DefraDB node * [defradb client backup](defradb_client_backup.md) - Interact with the backup utility * [defradb client collection](defradb_client_collection.md) - Interact with a collection. * [defradb client dump](defradb_client_dump.md) - Dump the contents of DefraDB node-side diff --git a/docs/cli/defradb_client_acp.md b/docs/cli/defradb_client_acp.md new file mode 100644 index 0000000000..d3f57ae230 --- /dev/null +++ b/docs/cli/defradb_client_acp.md @@ -0,0 +1,48 @@ +## defradb client acp + +Interact with the access control system of a DefraDB node + +### Synopsis + +Interact with the access control system of a DefraDB node + +Learn more about [ACP](/acp/README.md) + + + +### Options + +``` + -h, --help help for acp +``` + +### Options inherited from parent commands + +``` + --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs + --max-txn-retries int Specify the maximum number of retries per transaction (default 5) + --no-p2p Disable the peer-to-peer network synchronization system + --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) + --peers stringArray List of peers to connect to + --privkeypath string Path to the private key for tls + --pubkeypath string Path to the public key for tls + --rootdir string Directory for persistent data (default: $HOME/.defradb) + --store string Specify the datastore to use (supported: badger, memory) (default "badger") + --tx uint Transaction ID + --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") + --valuelogfilesize int Specify the datastore value log file size (in bytes). 
In memory size will be 2*valuelogfilesize (default 1073741824) +``` + +### SEE ALSO + +* [defradb client](defradb_client.md) - Interact with a DefraDB node +* [defradb client acp policy](defradb_client_acp_policy.md) - Interact with the acp policy features of DefraDB instance + diff --git a/docs/cli/defradb_client_acp_policy.md b/docs/cli/defradb_client_acp_policy.md new file mode 100644 index 0000000000..2e659a0eb4 --- /dev/null +++ b/docs/cli/defradb_client_acp_policy.md @@ -0,0 +1,44 @@ +## defradb client acp policy + +Interact with the acp policy features of DefraDB instance + +### Synopsis + +Interact with the acp policy features of DefraDB instance + +### Options + +``` + -h, --help help for policy +``` + +### Options inherited from parent commands + +``` + --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs + --max-txn-retries int Specify the maximum number of retries per transaction (default 5) + --no-p2p Disable the peer-to-peer network synchronization system + --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) + --peers stringArray List of peers to connect to + --privkeypath string Path to the private key for tls + --pubkeypath string Path to the public key for tls + --rootdir string Directory for persistent data (default: $HOME/.defradb) + --store string Specify the datastore to use (supported: badger, memory) (default "badger") + --tx uint Transaction ID + --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") + --valuelogfilesize int Specify the datastore value log file size (in bytes). In memory size will be 2*valuelogfilesize (default 1073741824) +``` + +### SEE ALSO + +* [defradb client acp](defradb_client_acp.md) - Interact with the access control system of a DefraDB node +* [defradb client acp policy add](defradb_client_acp_policy_add.md) - Add new policy + diff --git a/docs/cli/defradb_client_acp_policy_add.md b/docs/cli/defradb_client_acp_policy_add.md new file mode 100644 index 0000000000..f426909323 --- /dev/null +++ b/docs/cli/defradb_client_acp_policy_add.md @@ -0,0 +1,91 @@ +## defradb client acp policy add + +Add new policy + +### Synopsis + +Add new policy + +Notes: + - Can not add a policy without specifying an identity. + - ACP must be available (i.e. ACP can not be disabled). + - A non-DPI policy will be accepted (will be registered with acp system). + - But only a valid DPI policyID & resource can be specified on a schema. + - DPI validation happens when attempting to add a schema with '@policy'. 
+ - Learn more about [ACP & DPI Rules](/acp/README.md) + +Example: add from an argument string: + defradb client acp policy add -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j ' +description: A Valid DefraDB Policy Interface + +actor: + name: actor + +resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor +' + +Example: add from file: + defradb client acp policy add -i cosmos17r39df0hdcrgnmmw4mvu7qgk5nu888c7uvv37y -f policy.yml + +Example: add from file, verbose flags: + defradb client acp policy add --identity cosmos1kpw734v54g0t0d8tcye8ee5jc3gld0tcr2q473 --file policy.yml + +Example: add from stdin: + cat policy.yml | defradb client acp policy add - + + + +``` +defradb client acp policy add [-i --identity] [policy] [flags] +``` + +### Options + +``` + -f, --file string File to load a policy from + -h, --help help for add +``` + +### Options inherited from parent commands + +``` + --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs + --max-txn-retries int Specify the maximum number of retries per transaction (default 5) + --no-p2p Disable the peer-to-peer network synchronization system + --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) + --peers stringArray List of peers to connect to + --privkeypath string Path to the private key for tls + --pubkeypath string Path to the public key for tls + --rootdir string Directory for persistent data (default: $HOME/.defradb) + --store string Specify the datastore to use (supported: badger, memory) (default "badger") + --tx uint Transaction ID + --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") + --valuelogfilesize int Specify the datastore value log file size (in bytes). In memory size will be 2*valuelogfilesize (default 1073741824) +``` + +### SEE ALSO + +* [defradb client acp policy](defradb_client_acp_policy.md) - Interact with the acp policy features of DefraDB instance + diff --git a/docs/cli/defradb_client_backup.md b/docs/cli/defradb_client_backup.md index a7c7ae453b..ffa879365c 100644 --- a/docs/cli/defradb_client_backup.md +++ b/docs/cli/defradb_client_backup.md @@ -17,11 +17,14 @@ Currently only supports JSON format. ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. 
Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_backup_export.md b/docs/cli/defradb_client_backup_export.md index 6992b120c6..fc05e8ee14 100644 --- a/docs/cli/defradb_client_backup_export.md +++ b/docs/cli/defradb_client_backup_export.md @@ -31,11 +31,14 @@ defradb client backup export [-c --collections | -p --pretty | -f --format] ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_backup_import.md b/docs/cli/defradb_client_backup_import.md index ad2d3a1117..373f5be89c 100644 --- a/docs/cli/defradb_client_backup_import.md +++ b/docs/cli/defradb_client_backup_import.md @@ -23,11 +23,14 @@ defradb client backup import [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_collection.md b/docs/cli/defradb_client_collection.md index 593e2d01ee..59faa94f78 100644 --- a/docs/cli/defradb_client_collection.md +++ b/docs/cli/defradb_client_collection.md @@ -9,23 +9,26 @@ Create, read, update, and delete documents within a collection. 
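A recurring change across these generated pages is the replacement of the old `--logformat`, `--loglevel`, `--lognocolor`, `--logoutput`, and `--logtrace` options with the new `--log-format`, `--log-level`, `--log-no-color`, `--log-output`, `--log-source`, `--log-stacktrace`, and `--log-overrides` family. As a rough sketch of how string flags like these can be wired to a logger, here is a minimal standalone program using Go's standard `log/slog`; the flag-to-handler mapping is an assumption for illustration, not DefraDB's actual logging setup:

```go
package main

import (
	"flag"
	"log/slog"
	"os"
)

func main() {
	// Mirrors the shape of the new flags; defaults follow the help text above.
	format := flag.String("log-format", "text", "text or json")
	level := flag.String("log-level", "info", "debug, info, error, fatal")
	output := flag.String("log-output", "stderr", "stderr or stdout")
	source := flag.Bool("log-source", false, "include source location")
	flag.Parse()

	w := os.Stderr
	if *output == "stdout" {
		w = os.Stdout
	}

	lvl := map[string]slog.Level{
		"debug": slog.LevelDebug,
		"info":  slog.LevelInfo,
		"error": slog.LevelError,
		"fatal": slog.LevelError, // slog has no fatal level; approximated here
	}[*level]

	opts := &slog.HandlerOptions{Level: lvl, AddSource: *source}
	var h slog.Handler = slog.NewTextHandler(w, opts)
	if *format == "json" {
		h = slog.NewJSONHandler(w, opts)
	}
	slog.SetDefault(slog.New(h))
	slog.Info("logger configured")
}
```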
### Options ``` - --get-inactive Get inactive collections as well as active - -h, --help help for collection - --name string Collection name - --schema string Collection schema Root - --tx uint Transaction ID - --version string Collection version ID + --get-inactive Get inactive collections as well as active + -h, --help help for collection + -i, --identity string ACP Identity + --name string Collection name + --schema string Collection schema Root + --tx uint Transaction ID + --version string Collection version ID ``` ### Options inherited from parent commands ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) @@ -46,5 +49,6 @@ Create, read, update, and delete documents within a collection. * [defradb client collection describe](defradb_client_collection_describe.md) - View collection description. * [defradb client collection docIDs](defradb_client_collection_docIDs.md) - List all document IDs (docIDs). * [defradb client collection get](defradb_client_collection_get.md) - View document fields. +* [defradb client collection patch](defradb_client_collection_patch.md) - Patch existing collection descriptions * [defradb client collection update](defradb_client_collection_update.md) - Update documents by docID or filter. diff --git a/docs/cli/defradb_client_collection_create.md b/docs/cli/defradb_client_collection_create.md index 7c2cba7487..b565c2a547 100644 --- a/docs/cli/defradb_client_collection_create.md +++ b/docs/cli/defradb_client_collection_create.md @@ -6,21 +6,24 @@ Create a new document. Create a new document. 
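The other recurring addition in the pages below is the `-i, --identity` flag. A minimal sketch of how a persistent identity flag can be registered with cobra and carried on the command context; `withIdentity` and its key type are hypothetical helpers, not DefraDB's actual wiring:

```go
package main

import (
	"context"

	"github.com/spf13/cobra"
)

// identityKey is an unexported context key for the identity value.
type identityKey struct{}

// withIdentity attaches the ACP identity string to ctx. (hypothetical helper)
func withIdentity(ctx context.Context, identity string) context.Context {
	return context.WithValue(ctx, identityKey{}, identity)
}

func main() {
	var identity string
	cmd := &cobra.Command{
		Use: "client",
		RunE: func(cmd *cobra.Command, args []string) error {
			// Downstream request handling would read the identity from ctx.
			ctx := withIdentity(cmd.Context(), identity)
			_ = ctx
			return nil
		},
	}
	// Mirrors the "-i, --identity string   ACP Identity" option shown above.
	cmd.PersistentFlags().StringVarP(&identity, "identity", "i", "", "ACP Identity")
	_ = cmd.ExecuteContext(context.Background())
}
```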
-Example: create from string +Example: create from string: defradb client collection create --name User '{ "name": "Bob" }' -Example: create multiple from string +Example: create from string, with identity: + defradb client collection create -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name User '{ "name": "Bob" }' + +Example: create multiple from string: defradb client collection create --name User '[{ "name": "Alice" }, { "name": "Bob" }]' -Example: create from file +Example: create from file: defradb client collection create --name User -f document.json -Example: create from stdin +Example: create from stdin: cat document.json | defradb client collection create --name User - ``` -defradb client collection create [flags] +defradb client collection create [-i --identity] [flags] ``` ### Options @@ -35,11 +38,14 @@ defradb client collection create [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests --get-inactive Get inactive collections as well as active - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --name string Collection name --no-p2p Disable the peer-to-peer network synchronization system diff --git a/docs/cli/defradb_client_collection_delete.md b/docs/cli/defradb_client_collection_delete.md index 33a5af4809..2bca8d7d8a 100644 --- a/docs/cli/defradb_client_collection_delete.md +++ b/docs/cli/defradb_client_collection_delete.md @@ -6,21 +6,24 @@ Delete documents by docID or filter. Delete documents by docID or filter and lists the number of documents deleted. -Example: delete by docID(s) - defradb client collection delete --name User --docID bae-123,bae-456 +Example: delete by docID: + defradb client collection delete --name User --docID bae-123 -Example: delete by filter +Example: delete by docID with identity: + defradb client collection delete -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name User --docID bae-123 + +Example: delete by filter: defradb client collection delete --name User --filter '{ "_gte": { "points": 100 } }' ``` -defradb client collection delete [--filter --docID ] [flags] +defradb client collection delete [-i --identity] [--filter --docID ] [flags] ``` ### Options ``` - --docID strings Document ID + --docID string Document ID --filter string Document filter -h, --help help for delete ``` @@ -30,11 +33,14 @@ defradb client collection delete [--filter --docID ] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests --get-inactive Get inactive collections as well as active - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. 
Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --name string Collection name --no-p2p Disable the peer-to-peer network synchronization system diff --git a/docs/cli/defradb_client_collection_describe.md b/docs/cli/defradb_client_collection_describe.md index 46e8623d6a..bea05a1321 100644 --- a/docs/cli/defradb_client_collection_describe.md +++ b/docs/cli/defradb_client_collection_describe.md @@ -37,11 +37,14 @@ defradb client collection describe [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_collection_docIDs.md b/docs/cli/defradb_client_collection_docIDs.md index c976d05417..1cf1a8444a 100644 --- a/docs/cli/defradb_client_collection_docIDs.md +++ b/docs/cli/defradb_client_collection_docIDs.md @@ -6,12 +6,15 @@ List all document IDs (docIDs). List all document IDs (docIDs). -Example: +Example: list all docID(s): defradb client collection docIDs --name User + +Example: list all docID(s), with an identity: + defradb client collection docIDs -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name User ``` -defradb client collection docIDs [flags] +defradb client collection docIDs [-i --identity] [flags] ``` ### Options @@ -25,11 +28,14 @@ defradb client collection docIDs [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests --get-inactive Get inactive collections as well as active - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. 
Options are debug, info, error, fatal (default "info")
-      --lognocolor                Disable colored log output
-      --logoutput string          Log output path (default "stderr")
-      --logtrace                  Include stacktrace in error and fatal logs
+  -i, --identity string           ACP Identity
+      --log-format string         Log format to use. Options are text or json (default "text")
+      --log-level string          Log level to use. Options are debug, info, error, fatal (default "info")
+      --log-no-color              Disable colored log output
+      --log-output string         Log output path. Options are stderr or stdout. (default "stderr")
+      --log-overrides string      Logger config overrides. Format ,=,...;,...
+      --log-source                Include source location in logs
+      --log-stacktrace            Include stacktrace in error and fatal logs
       --max-txn-retries int       Specify the maximum number of retries per transaction (default 5)
       --name string               Collection name
       --no-p2p                    Disable the peer-to-peer network synchronization system
diff --git a/docs/cli/defradb_client_collection_get.md b/docs/cli/defradb_client_collection_get.md
index c2aeac17b3..7b80a2a54b 100644
--- a/docs/cli/defradb_client_collection_get.md
+++ b/docs/cli/defradb_client_collection_get.md
@@ -8,10 +8,13 @@ View document fields.
 Example:
   defradb client collection get --name User bae-123
+
+Example: to get a private document, an identity must be used:
+  defradb client collection get -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name User bae-123
 ```
-defradb client collection get [--show-deleted] [flags]
+defradb client collection get [-i --identity] [--show-deleted] [flags]
 ```
 ### Options
@@ -26,11 +29,14 @@ defradb client collection get [--show-deleted] [flags]
 ```
       --allowed-origins stringArray   List of origins to allow for CORS requests
       --get-inactive                  Get inactive collections as well as active
-      --logformat string              Log format to use. Options are csv, json (default "csv")
-      --loglevel string               Log level to use. Options are debug, info, error, fatal (default "info")
-      --lognocolor                    Disable colored log output
-      --logoutput string              Log output path (default "stderr")
-      --logtrace                      Include stacktrace in error and fatal logs
+  -i, --identity string               ACP Identity
+      --log-format string             Log format to use. Options are text or json (default "text")
+      --log-level string              Log level to use. Options are debug, info, error, fatal (default "info")
+      --log-no-color                  Disable colored log output
+      --log-output string             Log output path. Options are stderr or stdout. (default "stderr")
+      --log-overrides string          Logger config overrides. Format ,=,...;,...
+      --log-source                    Include source location in logs
+      --log-stacktrace                Include stacktrace in error and fatal logs
       --max-txn-retries int           Specify the maximum number of retries per transaction (default 5)
       --name string                   Collection name
       --no-p2p                        Disable the peer-to-peer network synchronization system
diff --git a/docs/cli/defradb_client_collection_patch.md b/docs/cli/defradb_client_collection_patch.md
new file mode 100644
index 0000000000..c8540aa397
--- /dev/null
+++ b/docs/cli/defradb_client_collection_patch.md
@@ -0,0 +1,65 @@
+## defradb client collection patch
+
+Patch existing collection descriptions
+
+### Synopsis
+
+Patch existing collection descriptions.
+
+Uses JSON Patch to modify collection descriptions.
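For readers unfamiliar with JSON Patch (RFC 6902), a patch document is simply an array of operation objects. A small generic sketch of building one in Go; the paths and values are placeholders, since real paths depend on the collection description layout:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// op mirrors a single RFC 6902 JSON Patch operation.
type op struct {
	Op    string `json:"op"`
	Path  string `json:"path"`
	Value any    `json:"value,omitempty"`
}

func main() {
	// Hypothetical two-operation patch: assert the current value, then change it.
	patch := []op{
		{Op: "test", Path: "/0/Name", Value: "User"},
		{Op: "replace", Path: "/0/Name", Value: "Users"},
	}
	out, err := json.Marshal(patch)
	if err != nil {
		panic(err)
	}
	// The resulting string is what gets passed as the [patch] argument.
	fmt.Println(string(out))
}
```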
+ +Example: patch from an argument string: + defradb client collection patch '[{ "op": "add", "path": "...", "value": {...} }]' + +Example: patch from file: + defradb client collection patch -p patch.json + +Example: patch from stdin: + cat patch.json | defradb client collection patch - + +To learn more about the DefraDB GraphQL Schema Language, refer to https://docs.source.network. + +``` +defradb client collection patch [patch] [flags] +``` + +### Options + +``` + -h, --help help for patch + -p, --patch-file string File to load a patch from +``` + +### Options inherited from parent commands + +``` + --allowed-origins stringArray List of origins to allow for CORS requests + --get-inactive Get inactive collections as well as active + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs + --max-txn-retries int Specify the maximum number of retries per transaction (default 5) + --name string Collection name + --no-p2p Disable the peer-to-peer network synchronization system + --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) + --peers stringArray List of peers to connect to + --privkeypath string Path to the private key for tls + --pubkeypath string Path to the public key for tls + --rootdir string Directory for persistent data (default: $HOME/.defradb) + --schema string Collection schema Root + --store string Specify the datastore to use (supported: badger, memory) (default "badger") + --tx uint Transaction ID + --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") + --valuelogfilesize int Specify the datastore value log file size (in bytes). In memory size will be 2*valuelogfilesize (default 1073741824) + --version string Collection version ID +``` + +### SEE ALSO + +* [defradb client collection](defradb_client_collection.md) - Interact with a collection. + diff --git a/docs/cli/defradb_client_collection_update.md b/docs/cli/defradb_client_collection_update.md index 1200cc5b3e..ab6b8999b0 100644 --- a/docs/cli/defradb_client_collection_update.md +++ b/docs/cli/defradb_client_collection_update.md @@ -6,26 +6,30 @@ Update documents by docID or filter. Update documents by docID or filter. 
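The update and delete examples below use a small filter language with operators such as `_gte`. Purely to make that shape concrete, here is a toy matcher for the `_gte` operator exactly as written in those examples; it is not DefraDB's filter evaluator:

```go
package main

import "fmt"

// matchGte reports whether doc satisfies a filter of the form
// { "_gte": { "<field>": <bound> } }: every named field must be >= its bound.
func matchGte(doc map[string]float64, gte map[string]float64) bool {
	for field, bound := range gte {
		if doc[field] < bound { // missing fields read as 0 and fail the check
			return false
		}
	}
	return true
}

func main() {
	doc := map[string]float64{"points": 150}
	fmt.Println(matchGte(doc, map[string]float64{"points": 100})) // true
	fmt.Println(matchGte(doc, map[string]float64{"points": 200})) // false
}
```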
-Example: update from string +Example: update from string: defradb client collection update --name User --docID bae-123 '{ "name": "Bob" }' -Example: update by filter +Example: update by filter: defradb client collection update --name User \ --filter '{ "_gte": { "points": 100 } }' --updater '{ "verified": true }' -Example: update by docIDs +Example: update by docID: defradb client collection update --name User \ - --docID bae-123,bae-456 --updater '{ "verified": true }' + --docID bae-123 --updater '{ "verified": true }' + +Example: update private docID, with identity: + defradb client collection update -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name User \ + --docID bae-123 --updater '{ "verified": true }' ``` -defradb client collection update [--filter --docID --updater ] [flags] +defradb client collection update [-i --identity] [--filter --docID --updater ] [flags] ``` ### Options ``` - --docID strings Document ID + --docID string Document ID --filter string Document filter -h, --help help for update --updater string Document updater @@ -36,11 +40,14 @@ defradb client collection update [--filter --docID --updater ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --name string Collection name --no-p2p Disable the peer-to-peer network synchronization system diff --git a/docs/cli/defradb_client_dump.md b/docs/cli/defradb_client_dump.md index bc00e292b9..a819df1514 100644 --- a/docs/cli/defradb_client_dump.md +++ b/docs/cli/defradb_client_dump.md @@ -16,11 +16,14 @@ defradb client dump [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_index.md b/docs/cli/defradb_client_index.md index 0dab1de7fe..bb59a6373b 100644 --- a/docs/cli/defradb_client_index.md +++ b/docs/cli/defradb_client_index.md @@ -16,11 +16,14 @@ Manage (create, drop, or list) collection indexes on a DefraDB node. ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. 
Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_index_create.md b/docs/cli/defradb_client_index_create.md index cbdbbe1d50..8c365e348e 100644 --- a/docs/cli/defradb_client_index_create.md +++ b/docs/cli/defradb_client_index_create.md @@ -33,11 +33,14 @@ defradb client index create -c --collection --fields [-n - ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_index_drop.md b/docs/cli/defradb_client_index_drop.md index bb9e6ec30a..03b206c6cb 100644 --- a/docs/cli/defradb_client_index_drop.md +++ b/docs/cli/defradb_client_index_drop.md @@ -25,11 +25,14 @@ defradb client index drop -c --collection -n --name [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. 
(default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_index_list.md b/docs/cli/defradb_client_index_list.md index a2d7ca8dd0..3c776f73ac 100644 --- a/docs/cli/defradb_client_index_list.md +++ b/docs/cli/defradb_client_index_list.md @@ -27,11 +27,14 @@ defradb client index list [-c --collection ] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_p2p.md b/docs/cli/defradb_client_p2p.md index 171e2ab661..2506208717 100644 --- a/docs/cli/defradb_client_p2p.md +++ b/docs/cli/defradb_client_p2p.md @@ -16,11 +16,14 @@ Interact with the DefraDB P2P system ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... 
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_p2p_collection.md b/docs/cli/defradb_client_p2p_collection.md index 11ace67212..a1de966445 100644 --- a/docs/cli/defradb_client_p2p_collection.md +++ b/docs/cli/defradb_client_p2p_collection.md @@ -17,11 +17,14 @@ The selected collections synchronize their events on the pubsub network. ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_p2p_collection_add.md b/docs/cli/defradb_client_p2p_collection_add.md index c54f235a60..01bc79ca0f 100644 --- a/docs/cli/defradb_client_p2p_collection_add.md +++ b/docs/cli/defradb_client_p2p_collection_add.md @@ -28,11 +28,14 @@ defradb client p2p collection add [collectionIDs] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... 
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_p2p_collection_getall.md b/docs/cli/defradb_client_p2p_collection_getall.md index 07c536d716..8d10944ad2 100644 --- a/docs/cli/defradb_client_p2p_collection_getall.md +++ b/docs/cli/defradb_client_p2p_collection_getall.md @@ -21,11 +21,14 @@ defradb client p2p collection getall [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_p2p_collection_remove.md b/docs/cli/defradb_client_p2p_collection_remove.md index 5a8eb969b6..1cd6a14ee9 100644 --- a/docs/cli/defradb_client_p2p_collection_remove.md +++ b/docs/cli/defradb_client_p2p_collection_remove.md @@ -28,11 +28,14 @@ defradb client p2p collection remove [collectionIDs] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... 
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_p2p_info.md b/docs/cli/defradb_client_p2p_info.md index 27fdf7cb9b..385780ad3d 100644 --- a/docs/cli/defradb_client_p2p_info.md +++ b/docs/cli/defradb_client_p2p_info.md @@ -20,11 +20,14 @@ defradb client p2p info [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_p2p_replicator.md b/docs/cli/defradb_client_p2p_replicator.md index 725845a726..b9d5b561c7 100644 --- a/docs/cli/defradb_client_p2p_replicator.md +++ b/docs/cli/defradb_client_p2p_replicator.md @@ -17,11 +17,14 @@ A replicator replicates one or all collection(s) from one node to another. ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... 
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_p2p_replicator_delete.md b/docs/cli/defradb_client_p2p_replicator_delete.md index ef89979be6..93e5ff6d95 100644 --- a/docs/cli/defradb_client_p2p_replicator_delete.md +++ b/docs/cli/defradb_client_p2p_replicator_delete.md @@ -26,11 +26,14 @@ defradb client p2p replicator delete [-c, --collection] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_p2p_replicator_getall.md b/docs/cli/defradb_client_p2p_replicator_getall.md index 4d33b5243f..cc9cc1ed63 100644 --- a/docs/cli/defradb_client_p2p_replicator_getall.md +++ b/docs/cli/defradb_client_p2p_replicator_getall.md @@ -25,11 +25,14 @@ defradb client p2p replicator getall [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... 
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_p2p_replicator_set.md b/docs/cli/defradb_client_p2p_replicator_set.md index 55654ded0f..4fbc980a7c 100644 --- a/docs/cli/defradb_client_p2p_replicator_set.md +++ b/docs/cli/defradb_client_p2p_replicator_set.md @@ -26,11 +26,14 @@ defradb client p2p replicator set [-c, --collection] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_query.md b/docs/cli/defradb_client_query.md index b23bf50553..493acca2d4 100644 --- a/docs/cli/defradb_client_query.md +++ b/docs/cli/defradb_client_query.md @@ -12,6 +12,9 @@ A query request can be sent as a single argument. Example command: Do a query request from a file by using the '-f' flag. Example command: defradb client query -f request.graphql +Do a query request from a file and with an identity. Example command: + defradb client query -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j -f request.graphql + Or it can be sent via stdin by using the '-' special syntax. Example command: cat request.graphql | defradb client query - @@ -21,7 +24,7 @@ with the database more conveniently. To learn more about the DefraDB GraphQL Query Language, refer to https://docs.source.network. ``` -defradb client query [query request] [flags] +defradb client query [-i --identity] [request] [flags] ``` ### Options @@ -35,11 +38,14 @@ defradb client query [query request] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. 
Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_schema.md b/docs/cli/defradb_client_schema.md index d37251c8db..2e144a89e6 100644 --- a/docs/cli/defradb_client_schema.md +++ b/docs/cli/defradb_client_schema.md @@ -16,11 +16,14 @@ Make changes, updates, or look for existing schema types. ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_schema_add.md b/docs/cli/defradb_client_schema_add.md index e0ad675241..0ff3f683f4 100644 --- a/docs/cli/defradb_client_schema_add.md +++ b/docs/cli/defradb_client_schema_add.md @@ -6,6 +6,11 @@ Add new schema Add new schema. +Schema Object with a '@policy(id:".." resource: "..")' linked will only be accepted if: + - ACP is available (i.e. ACP is not disabled). + - The specified resource adheres to the Document Access Control DPI Rules. + - Learn more about [ACP & DPI Rules](/acp/README.md) + Example: add from an argument string: defradb client schema add 'type Foo { ... }' @@ -32,11 +37,14 @@ defradb client schema add [schema] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. 
(default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_schema_describe.md b/docs/cli/defradb_client_schema_describe.md index cd79cce3c1..0b28a1e64e 100644 --- a/docs/cli/defradb_client_schema_describe.md +++ b/docs/cli/defradb_client_schema_describe.md @@ -36,11 +36,14 @@ defradb client schema describe [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_schema_migration.md b/docs/cli/defradb_client_schema_migration.md index b49420401c..c339763571 100644 --- a/docs/cli/defradb_client_schema_migration.md +++ b/docs/cli/defradb_client_schema_migration.md @@ -16,11 +16,14 @@ Make set or look for existing schema migrations on a DefraDB node. ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... 
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_schema_migration_down.md b/docs/cli/defradb_client_schema_migration_down.md index 6172bf09b1..f741f5bec9 100644 --- a/docs/cli/defradb_client_schema_migration_down.md +++ b/docs/cli/defradb_client_schema_migration_down.md @@ -33,11 +33,14 @@ defradb client schema migration down --collection [fl ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_schema_migration_get.md b/docs/cli/defradb_client_schema_migration_get.md deleted file mode 100644 index 20ed8edb91..0000000000 --- a/docs/cli/defradb_client_schema_migration_get.md +++ /dev/null @@ -1,41 +0,0 @@ -## defradb client schema migration get - -Gets the schema migrations within DefraDB - -### Synopsis - -Gets the schema migrations within the local DefraDB node. - -Example: - defradb client schema migration get' - -Learn more about the DefraDB GraphQL Schema Language on https://docs.source.network. - -``` -defradb client schema migration get [flags] -``` - -### Options - -``` - -h, --help help for get -``` - -### Options inherited from parent commands - -``` - --logformat string Log format to use. Options are csv, json (default "csv") - --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... - --loglevel string Log level to use. 
Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs - --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) - --tx uint Transaction ID - --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") -``` - -### SEE ALSO - -* [defradb client schema migration](defradb_client_schema_migration.md) - Interact with the schema migration system of a running DefraDB instance - diff --git a/docs/cli/defradb_client_schema_migration_reload.md b/docs/cli/defradb_client_schema_migration_reload.md index 01051e419a..8a1d8480c0 100644 --- a/docs/cli/defradb_client_schema_migration_reload.md +++ b/docs/cli/defradb_client_schema_migration_reload.md @@ -20,11 +20,14 @@ defradb client schema migration reload [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_schema_migration_set-registry.md b/docs/cli/defradb_client_schema_migration_set-registry.md index 8e80aa132d..ebb4c625c7 100644 --- a/docs/cli/defradb_client_schema_migration_set-registry.md +++ b/docs/cli/defradb_client_schema_migration_set-registry.md @@ -26,11 +26,14 @@ defradb client schema migration set-registry [collectionID] [cfg] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... 
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_schema_migration_set.md b/docs/cli/defradb_client_schema_migration_set.md index 9e6bcfcfc4..8386fd8369 100644 --- a/docs/cli/defradb_client_schema_migration_set.md +++ b/docs/cli/defradb_client_schema_migration_set.md @@ -33,11 +33,14 @@ defradb client schema migration set [src] [dst] [cfg] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_schema_migration_up.md b/docs/cli/defradb_client_schema_migration_up.md index bcd28453cf..b55ace45ad 100644 --- a/docs/cli/defradb_client_schema_migration_up.md +++ b/docs/cli/defradb_client_schema_migration_up.md @@ -33,11 +33,14 @@ defradb client schema migration up --collection [flag ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... 
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_schema_patch.md b/docs/cli/defradb_client_schema_patch.md index f24670b945..7d16e632ae 100644 --- a/docs/cli/defradb_client_schema_patch.md +++ b/docs/cli/defradb_client_schema_patch.md @@ -12,7 +12,7 @@ Example: patch from an argument string: defradb client schema patch '[{ "op": "add", "path": "...", "value": {...} }]' '{"lenses": [...' Example: patch from file: - defradb client schema patch -f patch.json + defradb client schema patch -p patch.json Example: patch from stdin: cat patch.json | defradb client schema patch - @@ -36,11 +36,14 @@ defradb client schema patch [schema] [migration] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_schema_set-active.md b/docs/cli/defradb_client_schema_set-active.md index ff94ff88fe..7f7b4f4cd5 100644 --- a/docs/cli/defradb_client_schema_set-active.md +++ b/docs/cli/defradb_client_schema_set-active.md @@ -21,11 +21,14 @@ defradb client schema set-active [versionID] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... 
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_schema_set-default.md b/docs/cli/defradb_client_schema_set-default.md deleted file mode 100644 index 0698b0e6d5..0000000000 --- a/docs/cli/defradb_client_schema_set-default.md +++ /dev/null @@ -1,36 +0,0 @@ -## defradb client schema set-default - -Set the default schema version - -### Synopsis - -Set the default schema version - -``` -defradb client schema set-default [versionID] [flags] -``` - -### Options - -``` - -h, --help help for set-default -``` - -### Options inherited from parent commands - -``` - --logformat string Log format to use. Options are csv, json (default "csv") - --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs - --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) - --tx uint Transaction ID - --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") -``` - -### SEE ALSO - -* [defradb client schema](defradb_client_schema.md) - Interact with the schema system of a DefraDB node - diff --git a/docs/cli/defradb_client_tx.md b/docs/cli/defradb_client_tx.md index 65f7740419..67bf63e2df 100644 --- a/docs/cli/defradb_client_tx.md +++ b/docs/cli/defradb_client_tx.md @@ -16,11 +16,14 @@ Create, commit, and discard DefraDB transactions ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_tx_commit.md b/docs/cli/defradb_client_tx_commit.md index 621459e134..eba408dc57 100644 --- a/docs/cli/defradb_client_tx_commit.md +++ b/docs/cli/defradb_client_tx_commit.md @@ -20,11 +20,14 @@ defradb client tx commit [id] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. 
Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_tx_create.md b/docs/cli/defradb_client_tx_create.md index cf695da6c7..26668e6ad2 100644 --- a/docs/cli/defradb_client_tx_create.md +++ b/docs/cli/defradb_client_tx_create.md @@ -22,11 +22,14 @@ defradb client tx create [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_tx_discard.md b/docs/cli/defradb_client_tx_discard.md index 7340bedf2a..3989bc4c05 100644 --- a/docs/cli/defradb_client_tx_discard.md +++ b/docs/cli/defradb_client_tx_discard.md @@ -20,11 +20,14 @@ defradb client tx discard [id] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. 
Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name1>,<key1>=<val1>,...;<name2>,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_view.md b/docs/cli/defradb_client_view.md index 9b93884430..09c5bab11b 100644 --- a/docs/cli/defradb_client_view.md +++ b/docs/cli/defradb_client_view.md @@ -16,11 +16,14 @@ Manage (add) views within a running DefraDB instance ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name1>,<key1>=<val1>,...;<name2>,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_view_add.md b/docs/cli/defradb_client_view_add.md index cdbab25a51..b671d8290c 100644 --- a/docs/cli/defradb_client_view_add.md +++ b/docs/cli/defradb_client_view_add.md @@ -26,11 +26,14 @@ defradb client view add [query] [sdl] [transform] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + -i, --identity string ACP Identity + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name1>,<key1>=<val1>,...;<name2>,...
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_init.md b/docs/cli/defradb_init.md deleted file mode 100644 index f8d69f5794..0000000000 --- a/docs/cli/defradb_init.md +++ /dev/null @@ -1,37 +0,0 @@ -## defradb init - -Initialize DefraDB's root directory and configuration file - -### Synopsis - -Initialize a directory for configuration and data at the given path. -Passed flags will be persisted in the stored configuration. - -``` -defradb init [flags] -``` - -### Options - -``` - -h, --help help for init - --reinitialize Reinitialize the configuration file -``` - -### Options inherited from parent commands - -``` - --logformat string Log format to use. Options are csv, json (default "csv") - --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs - --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) - --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") -``` - -### SEE ALSO - -* [defradb](defradb.md) - DefraDB Edge Database - diff --git a/docs/cli/defradb_server-dump.md b/docs/cli/defradb_server-dump.md index 2b590da6fe..3651d32e9c 100644 --- a/docs/cli/defradb_server-dump.md +++ b/docs/cli/defradb_server-dump.md @@ -16,11 +16,13 @@ defradb server-dump [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_start.md b/docs/cli/defradb_start.md index 2591f9bc06..e0f732cb04 100644 --- a/docs/cli/defradb_start.md +++ b/docs/cli/defradb_start.md @@ -20,11 +20,13 @@ defradb start [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. 
Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name1>,<key1>=<val1>,...;<name2>,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_version.md b/docs/cli/defradb_version.md index ce43eb148c..b4693fddbf 100644 --- a/docs/cli/defradb_version.md +++ b/docs/cli/defradb_version.md @@ -18,11 +18,13 @@ defradb version [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name1>,<key1>=<val1>,...;<name2>,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/config.md b/docs/config.md index 5f8985f71c..da46700bb7 100644 --- a/docs/config.md +++ b/docs/config.md @@ -63,4 +63,32 @@ https://docs.libp2p.io/concepts/addressing/ Enable libp2p's Circuit relay transport protocol. Defaults to `false`. -https://docs.libp2p.io/concepts/circuit-relay/ \ No newline at end of file +https://docs.libp2p.io/concepts/circuit-relay/ + +## `log.level` + +Log level to use. Options are `debug`, `info`, `error`, or `fatal`. Defaults to `info`. + +## `log.output` + +Log output path. Options are `stderr` or `stdout`. Defaults to `stderr`. + +## `log.format` + +Log format to use. Options are `text` or `json`. Defaults to `text`. + +## `log.stacktrace` + +Include stacktrace in error and fatal logs. Defaults to `false`. + +## `log.source` + +Include source location in logs. Defaults to `false`. + +## `log.overrides` + +Logger config overrides. Format `<name1>,<key1>=<val1>,...;<name2>,...`. + +## `log.nocolor` + +Disable colored log output. Defaults to `false`.
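Taken together, the new `log.*` keys map onto the rootdir config file roughly as follows. This is a minimal sketch, assuming the usual `config.yaml` layout; the logger name in `overrides` is illustrative:

```
# Minimal sketch of the logging section of a DefraDB config.yaml (assumed layout).
log:
  level: info        # debug, info, error, fatal
  format: text       # text or json
  output: stderr     # stderr or stdout
  stacktrace: false  # include stacktrace in error and fatal logs
  source: false      # include source location in logs
  nocolor: false     # disable colored output
  # Per-logger overrides: semicolon-separated logger blocks of comma-separated
  # key=value pairs; "net" is an assumed logger name.
  overrides: "net,level=debug,stacktrace=true"
```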
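The same settings are exposed through the renamed CLI flags documented above. A usage sketch (the override value is illustrative):

```
defradb start \
  --log-level debug \
  --log-format json \
  --log-output stdout \
  --log-overrides "net,level=info"
```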
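For the ACP additions referenced by the `schema add` help text and the new `examples/` files further below, an end-to-end sketch might look like this. It assumes a running node, a `client acp policy add` subcommand (its name and flags are assumed here, not shown in these docs), and the example identity used above:

```
# Add a DPI-compliant policy first; the returned policy id is what @policy references.
defradb client acp policy add -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j \
  -f examples/dpi_policy/user_dpi_policy.yml

# Then link that policy to a collection when adding its schema.
defradb client schema add -f examples/schema/permissioned/users.graphql
```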
diff --git a/docs/data_format_changes/i2409-unify-field-kind-and-schema.md b/docs/data_format_changes/i2409-unify-field-kind-and-schema.md new file mode 100644 index 0000000000..edda3e1e3e --- /dev/null +++ b/docs/data_format_changes/i2409-unify-field-kind-and-schema.md @@ -0,0 +1,3 @@ +# Unify Field Kind and Schema properties + +The client SchemaFieldDescription Kind and Schema properties have been unified, and FieldKind values 16 and 17 (foreign objects) have been replaced. diff --git a/docs/data_format_changes/i2451-rel-field-props-local.md b/docs/data_format_changes/i2451-rel-field-props-local.md new file mode 100644 index 0000000000..ad34cc8965 --- /dev/null +++ b/docs/data_format_changes/i2451-rel-field-props-local.md @@ -0,0 +1,3 @@ +# Move relation field properties onto collection + +Field RelationName and secondary relation fields have been made local, and moved off of the schema and onto the collection. Field IsPrimary has been removed completely (from the schema). As a result, the schema root and schema version id are no longer dependent on them. diff --git a/encoding/field_value.go b/encoding/field_value.go index 9c8cd5589f..f62375a461 100644 --- a/encoding/field_value.go +++ b/encoding/field_value.go @@ -11,30 +11,20 @@ package encoding import ( - "golang.org/x/exp/constraints" - "github.com/sourcenetwork/defradb/client" ) -func encodeIntFieldValue[T constraints.Integer](b []byte, val T, descending bool) []byte { - if descending { - return EncodeVarintDescending(b, int64(val)) - } - return EncodeVarintAscending(b, int64(val)) - } - // EncodeFieldValue encodes a FieldValue into a byte slice. // The encoded value is appended to the supplied buffer and the resulting buffer is returned. -func EncodeFieldValue(b []byte, val any, descending bool) []byte { - if val == nil { +func EncodeFieldValue(b []byte, val client.NormalValue, descending bool) []byte { + if val.IsNil() { if descending { return EncodeNullDescending(b) } else { return EncodeNullAscending(b) } } - switch v := val.(type) { - case bool: + if v, ok := val.Bool(); ok { var boolInt int64 = 0 if v { boolInt = 1 @@ -43,35 +33,66 @@ func EncodeFieldValue(b []byte, val any, descending bool) []byte { return EncodeVarintDescending(b, boolInt) } return EncodeVarintAscending(b, boolInt) - case int: - return encodeIntFieldValue(b, v, descending) - case int32: - return encodeIntFieldValue(b, v, descending) - case int64: - return encodeIntFieldValue(b, v, descending) - case float64: + } + if v, ok := val.NillableBool(); ok { + var boolInt int64 = 0 + if v.Value() { + boolInt = 1 + } + if descending { + return EncodeVarintDescending(b, boolInt) + } + return EncodeVarintAscending(b, boolInt) + } + if v, ok := val.Int(); ok { + if descending { + return EncodeVarintDescending(b, v) + } + return EncodeVarintAscending(b, v) + } + if v, ok := val.NillableInt(); ok { + if descending { + return EncodeVarintDescending(b, v.Value()) + } + return EncodeVarintAscending(b, v.Value()) + } + if v, ok := val.Float(); ok { if descending { return EncodeFloatDescending(b, v) } return EncodeFloatAscending(b, v) - case string: + } + if v, ok := val.NillableFloat(); ok { + if descending { + return EncodeFloatDescending(b, v.Value()) + } + return EncodeFloatAscending(b, v.Value()) + } + if v, ok := val.String(); ok { if descending { return EncodeStringDescending(b, v) } return EncodeStringAscending(b, v) } + if v, ok := val.NillableString(); ok { + if descending { + return EncodeStringDescending(b, v.Value()) + } + return EncodeStringAscending(b, v.Value()) + }
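+ // NOTE: NormalValue kinds without a branch above (e.g. array kinds) fall through to the final return and leave the buffer unchanged.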
return b } -// DecodeFieldValue decodes a FieldValue from a byte slice. +// DecodeFieldValue decodes a field value from a byte slice. // The decoded value is returned along with the remaining byte slice. -func DecodeFieldValue(b []byte, descending bool) ([]byte, any, error) { +func DecodeFieldValue(b []byte, descending bool, kind client.FieldKind) ([]byte, client.NormalValue, error) { typ := PeekType(b) switch typ { case Null: b, _ = DecodeIfNull(b) - return b, nil, nil + nilVal, err := client.NewNormalNil(kind) + return b, nilVal, err case Int: var v int64 var err error @@ -81,9 +102,9 @@ func DecodeFieldValue(b []byte, descending bool) ([]byte, any, error) { b, v, err = DecodeVarintAscending(b) } if err != nil { - return nil, nil, NewErrCanNotDecodeFieldValue(b, client.FieldKind_NILLABLE_INT, err) + return nil, nil, NewErrCanNotDecodeFieldValue(b, kind, err) } - return b, v, nil + return b, client.NewNormalInt(v), nil case Float: var v float64 var err error @@ -93,9 +114,9 @@ func DecodeFieldValue(b []byte, descending bool) ([]byte, any, error) { b, v, err = DecodeFloatAscending(b) } if err != nil { - return nil, nil, NewErrCanNotDecodeFieldValue(b, client.FieldKind_NILLABLE_FLOAT, err) + return nil, nil, NewErrCanNotDecodeFieldValue(b, kind, err) } - return b, v, nil + return b, client.NewNormalFloat(v), nil case Bytes, BytesDesc: var v []byte var err error @@ -105,10 +126,10 @@ func DecodeFieldValue(b []byte, descending bool) ([]byte, any, error) { b, v, err = DecodeBytesAscending(b) } if err != nil { - return nil, nil, NewErrCanNotDecodeFieldValue(b, client.FieldKind_NILLABLE_STRING, err) + return nil, nil, NewErrCanNotDecodeFieldValue(b, kind, err) } - return b, v, nil + return b, client.NewNormalString(v), nil } - return nil, nil, NewErrCanNotDecodeFieldValue(b, client.FieldKind_NILLABLE_STRING) + return nil, nil, NewErrCanNotDecodeFieldValue(b, kind) } diff --git a/encoding/field_value_test.go b/encoding/field_value_test.go index a08446cb1f..69a8096f85 100644 --- a/encoding/field_value_test.go +++ b/encoding/field_value_test.go @@ -15,57 +15,63 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/sourcenetwork/defradb/client" ) func TestEncodeDecodeFieldValue(t *testing.T) { + normalNil, err := client.NewNormalNil(client.FieldKind_NILLABLE_INT) + require.NoError(t, err) + tests := []struct { name string - inputVal any + inputVal client.NormalValue expectedBytes []byte expectedBytesDesc []byte expectedDecodedVal any }{ { name: "nil", - inputVal: nil, + inputVal: normalNil, expectedBytes: EncodeNullAscending(nil), expectedBytesDesc: EncodeNullDescending(nil), - expectedDecodedVal: nil, + expectedDecodedVal: normalNil, }, { name: "bool true", - inputVal: true, + inputVal: client.NewNormalBool(true), expectedBytes: EncodeVarintAscending(nil, 1), expectedBytesDesc: EncodeVarintDescending(nil, 1), - expectedDecodedVal: int64(1), + expectedDecodedVal: client.NewNormalInt(1), }, { name: "bool false", - inputVal: false, + inputVal: client.NewNormalBool(false), expectedBytes: EncodeVarintAscending(nil, 0), expectedBytesDesc: EncodeVarintDescending(nil, 0), - expectedDecodedVal: int64(0), + expectedDecodedVal: client.NewNormalInt(0), }, { name: "int", - inputVal: int64(55), + inputVal: client.NewNormalInt(55), expectedBytes: EncodeVarintAscending(nil, 55), expectedBytesDesc: EncodeVarintDescending(nil, 55), - expectedDecodedVal: int64(55), + expectedDecodedVal: client.NewNormalInt(55), }, { name: "float", - inputVal: 0.2, + 
inputVal: client.NewNormalFloat(0.2), expectedBytes: EncodeFloatAscending(nil, 0.2), expectedBytesDesc: EncodeFloatDescending(nil, 0.2), - expectedDecodedVal: 0.2, + expectedDecodedVal: client.NewNormalFloat(0.2), }, { name: "string", - inputVal: "str", + inputVal: client.NewNormalString("str"), expectedBytes: EncodeBytesAscending(nil, []byte("str")), expectedBytesDesc: EncodeBytesDescending(nil, []byte("str")), - expectedDecodedVal: []byte("str"), + expectedDecodedVal: client.NewNormalString("str"), }, } @@ -85,7 +91,7 @@ func TestEncodeDecodeFieldValue(t *testing.T) { t.Errorf("EncodeFieldValue() = %v, want %v", encoded, expectedBytes) } - _, decodedFieldVal, err := DecodeFieldValue(encoded, descending) + _, decodedFieldVal, err := DecodeFieldValue(encoded, descending, client.FieldKind_NILLABLE_INT) assert.NoError(t, err) if !reflect.DeepEqual(decodedFieldVal, tt.expectedDecodedVal) { t.Errorf("DecodeFieldValue() = %v, want %v", decodedFieldVal, tt.expectedDecodedVal) @@ -134,7 +140,7 @@ func TestDecodeInvalidFieldValue(t *testing.T) { if descending { inputBytes = tt.inputBytesDesc } - _, _, err := DecodeFieldValue(inputBytes, descending) + _, _, err := DecodeFieldValue(inputBytes, descending, client.FieldKind_NILLABLE_INT) assert.ErrorIs(t, err, ErrCanNotDecodeFieldValue) }) } diff --git a/examples/dpi_policy/user_dpi_policy.json b/examples/dpi_policy/user_dpi_policy.json new file mode 100644 index 0000000000..74028d8ee6 --- /dev/null +++ b/examples/dpi_policy/user_dpi_policy.json @@ -0,0 +1,30 @@ +{ + "description": "A Valid Defra Policy Interface (DPI)", + "actor": { + "name": "actor" + }, + "resources": { + "users": { + "permissions": { + "read": { + "expr": "owner + reader" + }, + "write": { + "expr": "owner" + } + }, + "relations": { + "owner": { + "types": [ + "actor" + ] + }, + "reader": { + "types": [ + "actor" + ] + } + } + } + } +} diff --git a/examples/dpi_policy/user_dpi_policy.yml b/examples/dpi_policy/user_dpi_policy.yml new file mode 100644 index 0000000000..fafae06957 --- /dev/null +++ b/examples/dpi_policy/user_dpi_policy.yml @@ -0,0 +1,29 @@ +# The below policy contains an example with valid DPI compliant resource that can be linked to a collection +# object during the schema add command to have access control enabled for documents of that collection. +# +# This policy is specified to the Users object example in: `examples/schema/permissioned/users.graphql` +# +# The same policy example in json format is in: `examples/dpi_policy/user_dpi_policy.json` +# +# Learn more about the DefraDB Policy Interface [DPI](/acp/README.md) + +description: A Valid DefraDB Policy Interface (DPI) + +actor: + name: actor + +resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor diff --git a/examples/schema/permissioned/book.graphql b/examples/schema/permissioned/book.graphql new file mode 100644 index 0000000000..96bdcbb877 --- /dev/null +++ b/examples/schema/permissioned/book.graphql @@ -0,0 +1,14 @@ +# The below sdl contains an example `Book` object with an example source hub policy id and resource name. +# +# The policy id must exist in sourcehub (for remote acp) or local acp first, and the resource name +# must exist on the corresponding policy to the policy id. +# +# Note: The resource name does not need to be similar to the collection name. 
+# +# The policy must be a valid DPI, learn more about the DefraDB Policy Interface [DPI](/acp/README.md) + +type Book @policy(id:"7dc51aabc0248cf106265c902bf56faa1989ec41a6bbd36b6e438cfade7aee4a", resource:"book") { + name: String + rating: Float +} + diff --git a/examples/schema/permissioned/users.graphql b/examples/schema/permissioned/users.graphql new file mode 100644 index 0000000000..771e6da2c9 --- /dev/null +++ b/examples/schema/permissioned/users.graphql @@ -0,0 +1,18 @@ +# The below sdl contains an example `Users` object with an example source hub policy id and resource name. +# +# The policy id must exist in sourcehub (for remote acp) or local acp first, and the resource name +# must exist on the corresponding policy to the policy id. +# +# The resource name does not need to be similar to the collection name. +# +# The linked policy id and resource correspond to the policy at: `examples/dpi_policy/user_dpi_policy.yml` +# +# The policy must be a valid DPI, learn more about the DefraDB Policy Interface [DPI](/acp/README.md) + +type Users @policy( + id: "24ab8cba6d6f0bcfe4d2712c7d95c09dd1b8076ea5a8896476413fd6c891c18c", + resource: "users" +) { + name: String + age: Int +} diff --git a/go.mod b/go.mod index 3d114d62ca..9d236c86de 100644 --- a/go.mod +++ b/go.mod @@ -1,39 +1,41 @@ module github.com/sourcenetwork/defradb -go 1.21 +go 1.21.3 require ( github.com/bits-and-blooms/bitset v1.13.0 github.com/bxcodec/faker v2.0.1+incompatible + github.com/cosmos/gogoproto v1.4.12 github.com/evanphx/json-patch/v5 v5.9.0 github.com/fxamacker/cbor/v2 v2.6.0 - github.com/getkin/kin-openapi v0.123.0 + github.com/getkin/kin-openapi v0.124.0 github.com/go-chi/chi/v5 v5.0.12 github.com/go-chi/cors v1.2.1 github.com/go-errors/errors v1.5.1 - github.com/gofrs/uuid/v5 v5.0.0 + github.com/gofrs/uuid/v5 v5.1.0 github.com/iancoleman/strcase v0.3.0 - github.com/ipfs/boxo v0.18.0 + github.com/ipfs/boxo v0.19.0 github.com/ipfs/go-block-format v0.2.0 github.com/ipfs/go-cid v0.4.1 github.com/ipfs/go-datastore v0.6.0 github.com/ipfs/go-ipld-format v0.6.0 - github.com/ipfs/go-log v1.0.5 github.com/ipfs/go-log/v2 v2.5.1 github.com/jbenet/goprocess v0.1.4 github.com/lens-vm/lens/host-go v0.0.0-20231127204031-8d858ed2926c - github.com/libp2p/go-libp2p v0.32.2 + github.com/libp2p/go-libp2p v0.33.2 github.com/libp2p/go-libp2p-gostream v0.6.0 github.com/libp2p/go-libp2p-kad-dht v0.25.2 github.com/libp2p/go-libp2p-pubsub v0.10.0 github.com/libp2p/go-libp2p-record v0.2.0 - github.com/multiformats/go-multiaddr v0.12.2 + github.com/multiformats/go-multiaddr v0.12.3 github.com/multiformats/go-multibase v0.2.0 github.com/multiformats/go-multihash v0.2.3 github.com/sourcenetwork/badger/v4 v4.2.1-0.20231113215945-a63444ca5276 + github.com/sourcenetwork/corelog v0.0.7 github.com/sourcenetwork/go-libp2p-pubsub-rpc v0.0.13 github.com/sourcenetwork/graphql-go v0.7.10-0.20231113214537-a9560c1898dd github.com/sourcenetwork/immutable v0.3.0 + github.com/sourcenetwork/sourcehub v0.2.1-0.20240305165631-9b75b1000724 github.com/spf13/cobra v1.8.0 github.com/spf13/pflag v1.0.5 github.com/spf13/viper v1.18.2 @@ -42,56 +44,134 @@ require ( github.com/ugorji/go/codec v1.2.12 github.com/valyala/fastjson v1.6.4 github.com/vito/go-sse v1.0.0 - go.opentelemetry.io/otel/metric v1.24.0 - go.opentelemetry.io/otel/sdk/metric v1.24.0 + go.opentelemetry.io/otel/metric v1.26.0 + go.opentelemetry.io/otel/sdk/metric v1.26.0 go.uber.org/zap v1.27.0 - golang.org/x/exp v0.0.0-20240103183307-be819d1f06fc - google.golang.org/grpc v1.62.0 - 
google.golang.org/protobuf v1.32.0 + golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 + google.golang.org/grpc v1.63.2 + google.golang.org/protobuf v1.33.0 ) require ( + buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.31.0-20230802163732-1c33ebd9ecfa.1 // indirect + cosmossdk.io/api v0.7.3 // indirect + cosmossdk.io/collections v0.4.0 // indirect + cosmossdk.io/core v0.11.0 // indirect + cosmossdk.io/depinject v1.0.0-alpha.4 // indirect + cosmossdk.io/errors v1.0.1 // indirect + cosmossdk.io/log v1.3.1 // indirect + cosmossdk.io/math v1.3.0 // indirect + cosmossdk.io/store v1.0.2 // indirect + cosmossdk.io/x/tx v0.13.1 // indirect + filippo.io/edwards25519 v1.0.0 // indirect + github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect + github.com/99designs/keyring v1.2.1 // indirect + github.com/DataDog/datadog-go v3.2.0+incompatible // indirect + github.com/DataDog/zstd v1.5.5 // indirect github.com/Jorropo/jsync v1.0.1 // indirect + github.com/NathanBaulch/protoc-gen-cobra v1.2.1 // indirect + github.com/awalterschulze/gographviz v2.0.3+incompatible // indirect github.com/benbjohnson/clock v1.3.5 // indirect github.com/beorn7/perks v1.0.1 // indirect + github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816 // indirect + github.com/btcsuite/btcd v0.22.1 // indirect + github.com/btcsuite/btcd/btcec/v2 v2.3.2 // indirect + github.com/btcsuite/btcutil v1.0.3-0.20201208143702-a53e38424cce // indirect github.com/bytecodealliance/wasmtime-go/v15 v15.0.0 // indirect + github.com/cenkalti/backoff/v4 v4.2.1 // indirect + github.com/cespare/xxhash v1.1.0 // indirect github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/cockroachdb/errors v1.11.1 // indirect + github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b // indirect + github.com/cockroachdb/pebble v1.1.0 // indirect + github.com/cockroachdb/redact v1.1.5 // indirect + github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 // indirect + github.com/cometbft/cometbft v0.38.6 // indirect + github.com/cometbft/cometbft-db v0.9.1 // indirect github.com/containerd/cgroups v1.1.0 // indirect github.com/coreos/go-systemd/v22 v22.5.0 // indirect + github.com/cosmos/btcutil v1.0.5 // indirect + github.com/cosmos/cosmos-db v1.0.2 // indirect + github.com/cosmos/cosmos-proto v1.0.0-beta.4 // indirect + github.com/cosmos/cosmos-sdk v0.50.5 // indirect + github.com/cosmos/go-bip39 v1.0.0 // indirect + github.com/cosmos/gogogateway v1.2.0 // indirect + github.com/cosmos/gorocksdb v1.2.0 // indirect + github.com/cosmos/iavl v1.0.1 // indirect + github.com/cosmos/ics23/go v0.10.0 // indirect + github.com/cosmos/ledger-cosmos-go v0.13.3 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.3 // indirect github.com/cskr/pubsub v1.0.2 // indirect + github.com/danieljoos/wincred v1.2.0 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect github.com/davidlazar/go-crypto v0.0.0-20200604182044-b73af7476f6c // indirect github.com/decred/dcrd/dcrec/secp256k1/v4 v4.2.0 // indirect + github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f // indirect + github.com/dgraph-io/badger/v2 v2.2007.4 // indirect github.com/dgraph-io/ristretto v0.1.1 // indirect + github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 // indirect github.com/docker/go-units v0.5.0 // indirect github.com/dustin/go-humanize v1.0.1 // indirect + github.com/dvsekhvalnov/jose2go v1.6.0 // indirect github.com/elastic/gosigar v0.14.2 // indirect - github.com/flynn/noise v1.0.1 
+	github.com/emicklei/dot v1.6.1 // indirect
+	github.com/fatih/color v1.15.0 // indirect
+	github.com/felixge/httpsnoop v1.0.4 // indirect
+	github.com/flynn/noise v1.1.0 // indirect
 	github.com/francoispqt/gojay v1.2.13 // indirect
 	github.com/fsnotify/fsnotify v1.7.0 // indirect
+	github.com/getsentry/sentry-go v0.27.0 // indirect
+	github.com/go-jose/go-jose/v3 v3.0.3 // indirect
+	github.com/go-kit/kit v0.12.0 // indirect
+	github.com/go-kit/log v0.2.1 // indirect
+	github.com/go-logfmt/logfmt v0.6.0 // indirect
 	github.com/go-logr/logr v1.4.1 // indirect
 	github.com/go-logr/stdr v1.2.2 // indirect
 	github.com/go-openapi/jsonpointer v0.20.2 // indirect
 	github.com/go-openapi/swag v0.22.8 // indirect
 	github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 // indirect
+	github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 // indirect
 	github.com/godbus/dbus/v5 v5.1.0 // indirect
+	github.com/gogo/googleapis v1.4.1 // indirect
 	github.com/gogo/protobuf v1.3.2 // indirect
 	github.com/golang/glog v1.2.0 // indirect
 	github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
-	github.com/golang/protobuf v1.5.3 // indirect
+	github.com/golang/protobuf v1.5.4 // indirect
+	github.com/golang/snappy v0.0.4 // indirect
+	github.com/google/btree v1.1.2 // indirect
 	github.com/google/flatbuffers v2.0.6+incompatible // indirect
+	github.com/google/go-cmp v0.6.0 // indirect
 	github.com/google/gopacket v1.1.19 // indirect
-	github.com/google/pprof v0.0.0-20231229205709-960ae82b1e42 // indirect
+	github.com/google/orderedcode v0.0.1 // indirect
+	github.com/google/pprof v0.0.0-20240207164012-fb44976bdcd5 // indirect
 	github.com/google/uuid v1.6.0 // indirect
-	github.com/gorilla/websocket v1.5.0 // indirect
+	github.com/gorilla/handlers v1.5.2 // indirect
+	github.com/gorilla/mux v1.8.1 // indirect
+	github.com/gorilla/websocket v1.5.1 // indirect
+	github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect
+	github.com/grpc-ecosystem/grpc-gateway v1.16.0 // indirect
+	github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.0 // indirect
+	github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c // indirect
 	github.com/hashicorp/errwrap v1.1.0 // indirect
+	github.com/hashicorp/go-hclog v1.5.0 // indirect
+	github.com/hashicorp/go-immutable-radix v1.3.1 // indirect
+	github.com/hashicorp/go-metrics v0.5.2 // indirect
 	github.com/hashicorp/go-multierror v1.1.1 // indirect
+	github.com/hashicorp/go-plugin v1.5.2 // indirect
 	github.com/hashicorp/golang-lru v1.0.2 // indirect
 	github.com/hashicorp/golang-lru/arc/v2 v2.0.5 // indirect
 	github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
 	github.com/hashicorp/hcl v1.0.0 // indirect
+	github.com/hashicorp/yamux v0.1.1 // indirect
+	github.com/hdevalence/ed25519consensus v0.1.0 // indirect
+	github.com/huandu/skiplist v1.2.0 // indirect
 	github.com/huin/goupnp v1.3.0 // indirect
+	github.com/hyperledger/aries-framework-go v0.3.2 // indirect
+	github.com/hyperledger/aries-framework-go/component/kmscrypto v0.0.0-20230427134832-0c9969493bd3 // indirect
+	github.com/hyperledger/aries-framework-go/component/log v0.0.0-20230427134832-0c9969493bd3 // indirect
+	github.com/hyperledger/aries-framework-go/component/models v0.0.0-20230501135648-a9a7ad029347 // indirect
+	github.com/hyperledger/aries-framework-go/spi v0.0.0-20230427134832-0c9969493bd3 // indirect
+	github.com/improbable-eng/grpc-web v0.15.0 // indirect
 	github.com/inconshreveable/mousetrap v1.1.0 // indirect
 	github.com/invopop/yaml v0.2.0 // indirect
 	github.com/ipfs/bbloom v0.0.4 // indirect
@@ -99,6 +179,7 @@ require (
 	github.com/ipfs/go-ipfs-pq v0.0.3 // indirect
 	github.com/ipfs/go-ipfs-util v0.0.3 // indirect
 	github.com/ipfs/go-ipld-legacy v0.2.1 // indirect
+	github.com/ipfs/go-log v1.0.5 // indirect
 	github.com/ipfs/go-metrics-interface v0.0.1 // indirect
 	github.com/ipfs/go-peertaskqueue v0.8.1 // indirect
 	github.com/ipfs/kubo v0.25.0 // indirect
@@ -106,10 +187,15 @@ require (
 	github.com/ipld/go-ipld-prime v0.21.0 // indirect
 	github.com/jackpal/go-nat-pmp v1.0.2 // indirect
 	github.com/jbenet/go-temp-err-catcher v0.1.0 // indirect
+	github.com/jmhodges/levigo v1.0.0 // indirect
 	github.com/josharian/intern v1.0.0 // indirect
-	github.com/klauspost/compress v1.17.4 // indirect
-	github.com/klauspost/cpuid/v2 v2.2.6 // indirect
+	github.com/kilic/bls12-381 v0.1.1-0.20210503002446-7b7597926c69 // indirect
+	github.com/klauspost/compress v1.17.7 // indirect
+	github.com/klauspost/cpuid/v2 v2.2.7 // indirect
 	github.com/koron/go-ssdp v0.0.4 // indirect
+	github.com/kr/pretty v0.3.1 // indirect
+	github.com/kr/text v0.2.0 // indirect
+	github.com/lib/pq v1.10.7 // indirect
 	github.com/libp2p/go-buffer-pool v0.1.0 // indirect
 	github.com/libp2p/go-cidranger v1.1.0 // indirect
 	github.com/libp2p/go-flow-metrics v0.1.0 // indirect
@@ -121,18 +207,23 @@ require (
 	github.com/libp2p/go-netroute v0.2.1 // indirect
 	github.com/libp2p/go-reuseport v0.4.0 // indirect
 	github.com/libp2p/go-yamux/v4 v4.0.1 // indirect
+	github.com/linxGnu/grocksdb v1.8.12 // indirect
+	github.com/lmittmann/tint v1.0.4 // indirect
 	github.com/magiconair/properties v1.8.7 // indirect
 	github.com/mailru/easyjson v0.7.7 // indirect
 	github.com/marten-seemann/tcp v0.0.0-20210406111302-dfbc87cc63fd // indirect
+	github.com/mattn/go-colorable v0.1.13 // indirect
 	github.com/mattn/go-isatty v0.0.20 // indirect
-	github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0 // indirect
-	github.com/miekg/dns v1.1.57 // indirect
+	github.com/miekg/dns v1.1.58 // indirect
 	github.com/mikioh/tcpinfo v0.0.0-20190314235526-30a79bb1804b // indirect
 	github.com/mikioh/tcpopt v0.0.0-20190314235656-172688c1accc // indirect
+	github.com/minio/highwayhash v1.0.2 // indirect
 	github.com/minio/sha256-simd v1.0.1 // indirect
+	github.com/mitchellh/go-testing-interface v1.14.1 // indirect
 	github.com/mitchellh/mapstructure v1.5.0 // indirect
 	github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect
 	github.com/mr-tron/base58 v1.2.0 // indirect
+	github.com/mtibben/percent v0.2.1 // indirect
 	github.com/multiformats/go-base32 v0.1.0 // indirect
 	github.com/multiformats/go-base36 v0.2.0 // indirect
 	github.com/multiformats/go-multiaddr-dns v0.3.1 // indirect
@@ -140,56 +231,82 @@ require (
 	github.com/multiformats/go-multicodec v0.9.0 // indirect
 	github.com/multiformats/go-multistream v0.5.0 // indirect
 	github.com/multiformats/go-varint v0.0.7 // indirect
-	github.com/onsi/ginkgo v1.16.5 // indirect
-	github.com/onsi/ginkgo/v2 v2.13.2 // indirect
-	github.com/opencontainers/runtime-spec v1.1.0 // indirect
+	github.com/oasisprotocol/curve25519-voi v0.0.0-20230904125328-1f23a7beb09a // indirect
+	github.com/oklog/run v1.1.0 // indirect
+	github.com/onsi/ginkgo/v2 v2.15.0 // indirect
+	github.com/opencontainers/runtime-spec v1.2.0 // indirect
 	github.com/opentracing/opentracing-go v1.2.0 // indirect
 	github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 // indirect
 	github.com/pelletier/go-toml/v2 v2.1.0 // indirect
 	github.com/perimeterx/marshmallow v1.1.5 // indirect
+	github.com/petermattis/goid v0.0.0-20230904192822-1876fd5063bc // indirect
+	github.com/piprate/json-gold v0.5.0 // indirect
 	github.com/pkg/errors v0.9.1 // indirect
 	github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
 	github.com/polydawn/refmt v0.89.0 // indirect
+	github.com/pquerna/cachecontrol v0.1.0 // indirect
 	github.com/prometheus/client_golang v1.18.0 // indirect
-	github.com/prometheus/client_model v0.5.0 // indirect
-	github.com/prometheus/common v0.45.0 // indirect
+	github.com/prometheus/client_model v0.6.0 // indirect
+	github.com/prometheus/common v0.47.0 // indirect
 	github.com/prometheus/procfs v0.12.0 // indirect
 	github.com/quic-go/qpack v0.4.0 // indirect
-	github.com/quic-go/qtls-go1-20 v0.4.1 // indirect
-	github.com/quic-go/quic-go v0.40.1 // indirect
+	github.com/quic-go/quic-go v0.42.0 // indirect
 	github.com/quic-go/webtransport-go v0.6.0 // indirect
 	github.com/raulk/go-watchdog v1.3.0 // indirect
+	github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect
+	github.com/rogpeppe/go-internal v1.12.0 // indirect
+	github.com/rs/cors v1.10.1 // indirect
+	github.com/rs/zerolog v1.32.0 // indirect
 	github.com/russross/blackfriday/v2 v2.1.0 // indirect
 	github.com/sagikazarmark/locafero v0.4.0 // indirect
 	github.com/sagikazarmark/slog-shim v0.1.0 // indirect
+	github.com/sasha-s/go-deadlock v0.3.1 // indirect
 	github.com/sourcegraph/conc v0.3.0 // indirect
+	github.com/sourcenetwork/raccoondb v0.2.0 // indirect
+	github.com/sourcenetwork/zanzi v0.3.0 // indirect
 	github.com/spaolacci/murmur3 v1.1.0 // indirect
 	github.com/spf13/afero v1.11.0 // indirect
 	github.com/spf13/cast v1.6.0 // indirect
 	github.com/stretchr/objx v0.5.2 // indirect
 	github.com/subosito/gotenv v1.6.0 // indirect
-	github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 // indirect
+	github.com/syndtr/goleveldb v1.0.1-0.20220721030215-126854af5e6d // indirect
+	github.com/tendermint/go-amino v0.16.0 // indirect
+	github.com/tendermint/tm-db v0.6.7 // indirect
+	github.com/teserakt-io/golang-ed25519 v0.0.0-20210104091850-3888c087a4c8 // indirect
 	github.com/textileio/go-log/v2 v2.1.3-gke-2 // indirect
 	github.com/whyrusleeping/go-keyspace v0.0.0-20160322163242-5b898ac5add1 // indirect
 	github.com/x448/float16 v0.8.4 // indirect
+	github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
+	github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
+	github.com/xeipuuv/gojsonschema v1.2.0 // indirect
+	github.com/zondax/hid v0.9.2 // indirect
+	github.com/zondax/ledger-go v0.14.3 // indirect
+	go.etcd.io/bbolt v1.3.8 // indirect
 	go.opencensus.io v0.24.0 // indirect
-	go.opentelemetry.io/otel v1.24.0 // indirect
-	go.opentelemetry.io/otel/sdk v1.24.0 // indirect
-	go.opentelemetry.io/otel/trace v1.24.0 // indirect
+	go.opentelemetry.io/otel v1.26.0 // indirect
+	go.opentelemetry.io/otel/sdk v1.26.0 // indirect
+	go.opentelemetry.io/otel/trace v1.26.0 // indirect
 	go.uber.org/dig v1.17.1 // indirect
 	go.uber.org/fx v1.20.1 // indirect
 	go.uber.org/mock v0.4.0 // indirect
 	go.uber.org/multierr v1.11.0 // indirect
-	golang.org/x/crypto v0.19.0 // indirect
-	golang.org/x/mod v0.14.0 // indirect
-	golang.org/x/net v0.21.0 // indirect
+	golang.org/x/crypto v0.21.0 // indirect
+	golang.org/x/mod v0.15.0 // indirect
+	golang.org/x/net v0.23.0 // indirect
 	golang.org/x/sync v0.6.0 // indirect
-	golang.org/x/sys v0.17.0 // indirect
+	golang.org/x/sys v0.19.0 // indirect
+	golang.org/x/term v0.19.0 // indirect
 	golang.org/x/text v0.14.0 // indirect
-	golang.org/x/tools v0.16.1 // indirect
+	golang.org/x/tools v0.18.0 // indirect
 	gonum.org/v1/gonum v0.14.0 // indirect
-	google.golang.org/genproto/googleapis/rpc v0.0.0-20240123012728-ef4313101c80 // indirect
+	google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de // indirect
+	google.golang.org/genproto/googleapis/api v0.0.0-20240227224415-6ceb2ff114de // indirect
+	google.golang.org/genproto/googleapis/rpc v0.0.0-20240227224415-6ceb2ff114de // indirect
 	gopkg.in/ini.v1 v1.67.0 // indirect
 	gopkg.in/yaml.v3 v3.0.1 // indirect
+	gotest.tools/v3 v3.5.1 // indirect
 	lukechampine.com/blake3 v1.2.1 // indirect
+	nhooyr.io/websocket v1.8.7 // indirect
+	pgregory.net/rapid v1.1.0 // indirect
+	sigs.k8s.io/yaml v1.4.0 // indirect
 )
diff --git a/go.sum b/go.sum
index deb2f24823..cbe3eea36c 100644
--- a/go.sum
+++ b/go.sum
@@ -1,61 +1,228 @@
+buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.31.0-20230802163732-1c33ebd9ecfa.1 h1:tdpHgTbmbvEIARu+bixzmleMi14+3imnpoFXz+Qzjp4=
+buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.31.0-20230802163732-1c33ebd9ecfa.1/go.mod h1:xafc+XIsTxTy76GJQ1TKgvJWsSugFBqMaN27WhUblew=
 cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
 cloud.google.com/go v0.31.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
 cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
 cloud.google.com/go v0.37.0/go.mod h1:TS1dMSSfndXH133OKGwekG838Om/cQT0BUHV3HcBgoo=
+cosmossdk.io/api v0.7.3 h1:V815i8YOwOAQa1rLCsSMjVG5Gnzs02JLq+l7ks8s1jk=
+cosmossdk.io/api v0.7.3/go.mod h1:IcxpYS5fMemZGqyYtErK7OqvdM0C8kdW3dq8Q/XIG38=
+cosmossdk.io/collections v0.4.0 h1:PFmwj2W8szgpD5nOd8GWH6AbYNi1f2J6akWXJ7P5t9s=
+cosmossdk.io/collections v0.4.0/go.mod h1:oa5lUING2dP+gdDquow+QjlF45eL1t4TJDypgGd+tv0=
+cosmossdk.io/core v0.11.0 h1:vtIafqUi+1ZNAE/oxLOQQ7Oek2n4S48SWLG8h/+wdbo=
+cosmossdk.io/core v0.11.0/go.mod h1:LaTtayWBSoacF5xNzoF8tmLhehqlA9z1SWiPuNC6X1w=
+cosmossdk.io/depinject v1.0.0-alpha.4 h1:PLNp8ZYAMPTUKyG9IK2hsbciDWqna2z1Wsl98okJopc=
+cosmossdk.io/depinject v1.0.0-alpha.4/go.mod h1:HeDk7IkR5ckZ3lMGs/o91AVUc7E596vMaOmslGFM3yU=
+cosmossdk.io/errors v1.0.1 h1:bzu+Kcr0kS/1DuPBtUFdWjzLqyUuCiyHjyJB6srBV/0=
+cosmossdk.io/errors v1.0.1/go.mod h1:MeelVSZThMi4bEakzhhhE/CKqVv3nOJDA25bIqRDu/U=
+cosmossdk.io/log v1.3.1 h1:UZx8nWIkfbbNEWusZqzAx3ZGvu54TZacWib3EzUYmGI=
+cosmossdk.io/log v1.3.1/go.mod h1:2/dIomt8mKdk6vl3OWJcPk2be3pGOS8OQaLUM/3/tCM=
+cosmossdk.io/math v1.3.0 h1:RC+jryuKeytIiictDslBP9i1fhkVm6ZDmZEoNP316zE=
+cosmossdk.io/math v1.3.0/go.mod h1:vnRTxewy+M7BtXBNFybkuhSH4WfedVAAnERHgVFhp3k=
+cosmossdk.io/store v1.0.2 h1:lSg5BTvJBHUDwswNNyeh4K/CbqiHER73VU4nDNb8uk0=
+cosmossdk.io/store v1.0.2/go.mod h1:EFtENTqVTuWwitGW1VwaBct+yDagk7oG/axBMPH+FXs=
+cosmossdk.io/x/tx v0.13.1 h1:Mg+EMp67Pz+NukbJqYxuo8uRp7N/a9uR+oVS9pONtj8=
+cosmossdk.io/x/tx v0.13.1/go.mod h1:CBCU6fsRVz23QGFIQBb1DNX2DztJCf3jWyEkHY2nJQ0=
 dmitri.shuralyov.com/app/changes v0.0.0-20180602232624-0a106ad413e3/go.mod h1:Yl+fi1br7+Rr3LqpNJf1/uxUdtRUV+Tnj0o93V2B9MU=
+dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
 dmitri.shuralyov.com/html/belt v0.0.0-20180602232347-f7d459c86be0/go.mod h1:JLBrvjyP0v+ecvNYvCpyZgu5/xkfAUhi6wJj28eUfSU=
 dmitri.shuralyov.com/service/change v0.0.0-20181023043359-a85b471d5412/go.mod h1:a1inKt/atXimZ4Mv927x+r7UpyzRUf4emIoiiSC2TN4=
 dmitri.shuralyov.com/state v0.0.0-20180228185332-28bcc343414c/go.mod h1:0PRwlb0D6DFvNNtx+9ybjezNCa8XF0xaYcETyp6rHWU=
+filippo.io/edwards25519 v1.0.0 h1:0wAIcmJUqRdI8IJ/3eGi5/HwXZWPujYXXlkrQogz0Ek=
+filippo.io/edwards25519 v1.0.0/go.mod h1:N1IkdkCkiLB6tki+MYJoSx2JTY9NUlxZE7eHn5EwJns=
 git.apache.org/thrift.git v0.0.0-20180902110319-2566ecd5d999/go.mod h1:fPE2ZNJGynbRyZ4dJvy6G277gSllfV2HJqblrnkyeyg=
+github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 h1:/vQbFIOMbk2FiG/kXiLl8BRyzTWDw7gX/Hz7Dd5eDMs=
+github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4/go.mod h1:hN7oaIRCjzsZ2dE+yG5k+rsdt3qcwykqK6HVGcKwsw4=
+github.com/99designs/keyring v1.2.1 h1:tYLp1ULvO7i3fI5vE21ReQuj99QFSs7lGm0xWyJo87o=
+github.com/99designs/keyring v1.2.1/go.mod h1:fc+wB5KTk9wQ9sDx0kFXB3A0MaeGHM9AwRStKOQ5vOA=
 github.com/AndreasBriese/bbloom v0.0.0-20190825152654-46b345b51c96 h1:cTp8I5+VIoKjsnZuH8vjyaysT/ses3EvZeaV/1UkF2M=
 github.com/AndreasBriese/bbloom v0.0.0-20190825152654-46b345b51c96/go.mod h1:bOvUY6CB00SOBii9/FifXqc0awNKxLFCL/+pkDPuyl8=
+github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0=
+github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
-github.com/DataDog/zstd v1.4.1 h1:3oxKN3wbHibqx897utPC2LTQU4J+IHWWJO+glkAkpFM=
-github.com/DataDog/zstd v1.4.1/go.mod h1:1jcaCB/ufaK+sKp1NBhlGmpz41jOoPQ35bpF36t7BBo=
+github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
+github.com/DataDog/datadog-go v3.2.0+incompatible h1:qSG2N4FghB1He/r2mFrWKCaL7dXCilEuNEeAn20fdD4=
+github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=
+github.com/DataDog/zstd v1.5.5 h1:oWf5W7GtOLgp6bciQYDmhHHjdhYkALu6S/5Ni9ZgSvQ=
+github.com/DataDog/zstd v1.5.5/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw=
 github.com/Jorropo/jsync v1.0.1 h1:6HgRolFZnsdfzRUj+ImB9og1JYOxQoReSywkHOGSaUU=
 github.com/Jorropo/jsync v1.0.1/go.mod h1:jCOZj3vrBCri3bSU3ErUYvevKlnbssrXeCivybS5ABQ=
+github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0=
+github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow=
+github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM=
+github.com/NathanBaulch/protoc-gen-cobra v1.2.1 h1:BOqX9glwicbqDJDGndMnhHhx8psGTSjGdZzRDY1a7A8=
+github.com/NathanBaulch/protoc-gen-cobra v1.2.1/go.mod h1:ZLPLEPQgV3jP3a7IEp+xxYPk8tF4lhY9ViV0hn6K3iA=
+github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 h1:TngWCqHvy9oXAN6lEVMRuU21PR1EtLVZJmdB18Gu3Rw=
+github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5/go.mod h1:lmUJ/7eu/Q8D7ML55dXQrVaamCz2vxCfdQBasLZfHKk=
+github.com/OneOfOne/xxhash v1.2.2 h1:KMrpdQIwFcEqXDklaen+P1axHaj9BSKzvpUUfnHldSE=
+github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
+github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo=
+github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI=
+github.com/VividCortex/gohistogram v1.0.0 h1:6+hBz+qvs0JOrrNhhmR7lFxo5sINxBCGXrdtl/UvroE=
+github.com/VividCortex/gohistogram v1.0.0/go.mod h1:Pf5mBqqDxYaXu3hDrrU+w6nw50o/4+TcAqDqk/vUH7g=
+github.com/adlio/schema v1.3.3 h1:oBJn8I02PyTB466pZO1UZEn1TV5XLlifBSyMrmHl/1I=
+github.com/adlio/schema v1.3.3/go.mod h1:1EsRssiv9/Ce2CMzq5DoL7RiMshhuigQxrR4DMV9fHg=
+github.com/aead/siphash v1.0.1/go.mod h1:Nywa3cDsYNNK3gaciGTWPwHt0wlpNV15vwmswBAUSII=
+github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c=
+github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
+github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
+github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
+github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
+github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
 github.com/alecthomas/units v0.0.0-20231202071711-9a357b53e9c9 h1:ez/4by2iGztzR4L0zgAOR8lTQK9VlyBVVd7G4omaOQs=
 github.com/alecthomas/units v0.0.0-20231202071711-9a357b53e9c9/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE=
 github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
+github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
+github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
+github.com/apache/thrift v0.13.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
+github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
+github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
+github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
+github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
+github.com/aryann/difflib v0.0.0-20170710044230-e206f873d14a/go.mod h1:DAHtR1m6lCRdSC2Tm3DSWRPvIPr6xNKyeHdqDQSQT+A=
+github.com/awalterschulze/gographviz v2.0.3+incompatible h1:9sVEXJBJLwGX7EQVhLm2elIKCm7P2YHFC8v6096G09E=
+github.com/awalterschulze/gographviz v2.0.3+incompatible/go.mod h1:GEV5wmg4YquNw7v1kkyoX9etIk8yVmXj+AkDHuuETHs=
+github.com/aws/aws-lambda-go v1.13.3/go.mod h1:4UKl9IzQMoD+QF79YdCuzCwp8VbmG4VAQwij/eHl5CU=
+github.com/aws/aws-sdk-go v1.27.0/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
+github.com/aws/aws-sdk-go-v2 v0.18.0/go.mod h1:JWVYvqSMppoMJC0x5wdwiImzgXTI9FuZwxzkQq9wy+g=
 github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=
 github.com/benbjohnson/clock v1.3.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=
 github.com/benbjohnson/clock v1.3.5 h1:VvXlSJBzZpA/zum6Sj74hxwYI2DIxRWuNIoXAzHZz5o=
 github.com/benbjohnson/clock v1.3.5/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=
 github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
+github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
 github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
 github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
+github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
+github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816 h1:41iFGWnSlI2gVpmOtVTJZNodLdLQLn/KsJqFvXwnd/s=
+github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
 github.com/bits-and-blooms/bitset v1.13.0 h1:bAQ9OPNFYbGHV6Nez0tmNI0RiEu7/hxlYJRUA0wFAVE=
 github.com/bits-and-blooms/bitset v1.13.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8=
 github.com/bradfitz/go-smtpd v0.0.0-20170404230938-deb6d6237625/go.mod h1:HYsPBTaaSFSlLx/70C2HPIMNZpVV8+vt/A+FMnYP11g=
+github.com/btcsuite/btcd v0.20.1-beta/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ=
+github.com/btcsuite/btcd v0.22.1 h1:CnwP9LM/M9xuRrGSCGeMVs9iv09uMqwsVX7EeIpgV2c=
+github.com/btcsuite/btcd v0.22.1/go.mod h1:wqgTSL29+50LRkmOVknEdmt8ZojIzhuWvgu/iptuN7Y=
+github.com/btcsuite/btcd/btcec/v2 v2.3.2 h1:5n0X6hX0Zk+6omWcihdYvdAlGf2DfasC0GMf7DClJ3U=
+github.com/btcsuite/btcd/btcec/v2 v2.3.2/go.mod h1:zYzJ8etWJQIv1Ogk7OzpWjowwOdXY1W/17j2MW85J04=
+github.com/btcsuite/btcd/btcutil v1.1.3 h1:xfbtw8lwpp0G6NwSHb+UE67ryTFHJAiNuipusjXSohQ=
+github.com/btcsuite/btcd/btcutil v1.1.3/go.mod h1:UR7dsSJzJUfMmFiiLlIrMq1lS9jh9EdCV7FStZSnpi0=
+github.com/btcsuite/btcd/chaincfg/chainhash v1.0.2 h1:KdUfX2zKommPRa+PD0sWZUyXe9w277ABlgELO7H04IM=
+github.com/btcsuite/btcd/chaincfg/chainhash v1.0.2/go.mod h1:7SFka0XMvUgj3hfZtydOrQY2mwhPclbT2snogU7SQQc=
+github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f/go.mod h1:TdznJufoqS23FtqVCzL0ZqgP5MqXbb4fg/WgDys70nA=
+github.com/btcsuite/btcutil v0.0.0-20190425235716-9e5f4b9a998d/go.mod h1:+5NJ2+qvTyV9exUAL/rxXi3DcLg2Ts+ymUAY5y4NvMg=
+github.com/btcsuite/btcutil v1.0.3-0.20201208143702-a53e38424cce h1:YtWJF7RHm2pYCvA5t0RPmAaLUhREsKuKd+SLhxFbFeQ=
+github.com/btcsuite/btcutil v1.0.3-0.20201208143702-a53e38424cce/go.mod h1:0DVlHczLPewLcPGEIeUEzfOJhqGPQ0mJJRDBtD307+o=
+github.com/btcsuite/go-socks v0.0.0-20170105172521-4720035b7bfd/go.mod h1:HHNXQzUsZCxOoE+CPiyCTO6x34Zs86zZUiwtpXoGdtg=
+github.com/btcsuite/goleveldb v0.0.0-20160330041536-7834afc9e8cd/go.mod h1:F+uVaaLLH7j4eDXPRvw78tMflu7Ie2bzYOH4Y8rRKBY=
+github.com/btcsuite/snappy-go v0.0.0-20151229074030-0bdef8d06723/go.mod h1:8woku9dyThutzjeg+3xrA5iCpBRH8XEEg3lh6TiUghc=
+github.com/btcsuite/websocket v0.0.0-20150119174127-31079b680792/go.mod h1:ghJtEyQwv5/p4Mg4C0fgbePVuGr935/5ddU9Z3TmDRY=
+github.com/btcsuite/winsvc v1.0.0/go.mod h1:jsenWakMcC0zFBFurPLEAyrnc/teJEM1O46fmI40EZs=
+github.com/bufbuild/protocompile v0.6.0 h1:Uu7WiSQ6Yj9DbkdnOe7U4mNKp58y9WDMKDn28/ZlunY=
+github.com/bufbuild/protocompile v0.6.0/go.mod h1:YNP35qEYoYGme7QMtz5SBCoN4kL4g12jTtjuzRNdjpE=
 github.com/buger/jsonparser v0.0.0-20181115193947-bf1c66bbce23/go.mod h1:bbYlZJ7hK1yFx9hf58LP0zeX7UjIGs20ufpu3evjr+s=
 github.com/bxcodec/faker v2.0.1+incompatible h1:P0KUpUw5w6WJXwrPfv35oc91i4d8nf40Nwln+M/+faA=
 github.com/bxcodec/faker v2.0.1+incompatible/go.mod h1:BNzfpVdTwnFJ6GtfYTcQu6l6rHShT+veBxNCnjCx5XM=
 github.com/bytecodealliance/wasmtime-go/v15 v15.0.0 h1:4R2MpSPPbtSxqdsOTvsMn1pnwdEhzbDGMao6LUUSLv4=
 github.com/bytecodealliance/wasmtime-go/v15 v15.0.0/go.mod h1:m6vB/SsM+pnJkVHmO1wzHYUeYtciltTKuxuvkR8pYcY=
+github.com/casbin/casbin/v2 v2.1.2/go.mod h1:YcPU1XXisHhLzuxH9coDNf2FbKpjGlbCg3n9yuLkIJQ=
+github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4=
+github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM=
+github.com/cenkalti/backoff/v4 v4.1.1/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw=
+github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM=
+github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
 github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
 github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
 github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
 github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
 github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44=
 github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
+github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
+github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
+github.com/chzyer/readline v1.5.1 h1:upd/6fQk4src78LMRzh5vItIt361/o4uq553V8B5sGI=
+github.com/chzyer/readline v1.5.1/go.mod h1:Eh+b79XXUwfKfcPLepksvw2tcLE/Ct21YObkaSkeBlk=
+github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
 github.com/cilium/ebpf v0.2.0/go.mod h1:To2CFviqOWL/M0gIMsvSMlqe7em/l1ALkX1PyjrX2Qs=
+github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag=
+github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I=
+github.com/clbanning/x2j v0.0.0-20191024224557-825249438eec/go.mod h1:jMjuTZXRI4dUb/I5gc9Hdhagfvm9+RyrPryS/auMzxE=
 github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
 github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
+github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
+github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI=
+github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
+github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
+github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
+github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
+github.com/cockroachdb/apd/v2 v2.0.2 h1:weh8u7Cneje73dDh+2tEVLUvyBc89iwepWCD8b8034E=
+github.com/cockroachdb/apd/v2 v2.0.2/go.mod h1:DDxRlzC2lo3/vSlmSoS7JkqbbrARPuFOGr0B9pvN3Gw=
+github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8=
+github.com/cockroachdb/datadriven v1.0.3-0.20230413201302-be42291fc80f h1:otljaYPt5hWxV3MUfO5dFPFiOXg9CyG5/kCfayTqsJ4=
+github.com/cockroachdb/datadriven v1.0.3-0.20230413201302-be42291fc80f/go.mod h1:a9RdTaap04u637JoCzcUoIcDmvwSUtcUFtT/C3kJlTU=
+github.com/cockroachdb/errors v1.11.1 h1:xSEW75zKaKCWzR3OfxXUxgrk/NtT4G1MiOv5lWZazG8=
+github.com/cockroachdb/errors v1.11.1/go.mod h1:8MUxA3Gi6b25tYlFEBGLf+D8aISL+M4MIpiWMSNRfxw=
+github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b h1:r6VH0faHjZeQy818SGhaone5OnYfxFR/+AzdY3sf5aE=
+github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b/go.mod h1:Vz9DsVWQQhf3vs21MhPMZpMGSht7O/2vFW2xusFUVOs=
+github.com/cockroachdb/pebble v1.1.0 h1:pcFh8CdCIt2kmEpK0OIatq67Ln9uGDYY3d5XnE0LJG4=
+github.com/cockroachdb/pebble v1.1.0/go.mod h1:sEHm5NOXxyiAoKWhoFxT8xMgd/f3RA6qUqQ1BXKrh2E=
+github.com/cockroachdb/redact v1.1.5 h1:u1PMllDkdFfPWaNGMyLD1+so+aq3uUItthCFqzwPJ30=
+github.com/cockroachdb/redact v1.1.5/go.mod h1:BVNblN9mBWFyMyqK1k3AAiSxhvhfK2oOZZ2lK+dpvRg=
+github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 h1:zuQyyAKVxetITBuuhv3BI9cMrmStnpT18zmgmTxunpo=
+github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06/go.mod h1:7nc4anLGjupUW/PeY5qiNYsdNXj7zopG+eqsS7To5IQ=
+github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI=
+github.com/cometbft/cometbft v0.38.6 h1:QSgpCzrGWJ2KUq1qpw+FCfASRpE27T6LQbfEHscdyOk=
+github.com/cometbft/cometbft v0.38.6/go.mod h1:8rSPxzUJYquCN8uuBgbUHOMg2KAwvr7CyUw+6ukO4nw=
+github.com/cometbft/cometbft-db v0.9.1 h1:MIhVX5ja5bXNHF8EYrThkG9F7r9kSfv8BX4LWaxWJ4M=
+github.com/cometbft/cometbft-db v0.9.1/go.mod h1:iliyWaoV0mRwBJoizElCwwRA9Tf7jZJOURcRZF9m60U=
 github.com/containerd/cgroups v0.0.0-20201119153540-4cbc285b3327/go.mod h1:ZJeTFisyysqgcCdecO57Dj79RfL0LNeGiFUqLYQRYLE=
 github.com/containerd/cgroups v1.1.0 h1:v8rEWFl6EoqHB+swVNjVoCJE8o3jX7e8nqBGPLaDFBM=
 github.com/containerd/cgroups v1.1.0/go.mod h1:6ppBcbh/NOOUU+dMKrykgaBnK9lCIBxHqJDGwsa1mIw=
+github.com/containerd/continuity v0.3.0 h1:nisirsYROK15TAMVukJOUyGJjz4BNQJBVsNvAXZJ/eg=
+github.com/containerd/continuity v0.3.0/go.mod h1:wJEAIwKOm/pBZuBd0JmeTvnLquTB1Ag8espWhkykbPM=
+github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
+github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk=
+github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
+github.com/coreos/go-systemd v0.0.0-20180511133405-39ca1b05acc7/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
 github.com/coreos/go-systemd v0.0.0-20181012123002-c6f51f82210d/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
 github.com/coreos/go-systemd/v22 v22.1.0/go.mod h1:xO0FLkIi5MaZafQlIrOotqXZ90ih+1atmu1JpKERPPk=
 github.com/coreos/go-systemd/v22 v22.5.0 h1:RrqgGjYQKalulkV8NGVIfkXQf6YYmOyiJKk8iXXhfZs=
 github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
+github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
+github.com/cosmos/btcutil v1.0.5 h1:t+ZFcX77LpKtDBhjucvnOH8C2l2ioGsBNEQ3jef8xFk=
+github.com/cosmos/btcutil v1.0.5/go.mod h1:IyB7iuqZMJlthe2tkIFL33xPyzbFYP0XVdS8P5lUPis=
+github.com/cosmos/cosmos-db v1.0.2 h1:hwMjozuY1OlJs/uh6vddqnk9j7VamLv+0DBlbEXbAKs=
+github.com/cosmos/cosmos-db v1.0.2/go.mod h1:Z8IXcFJ9PqKK6BIsVOB3QXtkKoqUOp1vRvPT39kOXEA=
+github.com/cosmos/cosmos-proto v1.0.0-beta.4 h1:aEL7tU/rLOmxZQ9z4i7mzxcLbSCY48OdY7lIWTLG7oU=
+github.com/cosmos/cosmos-proto v1.0.0-beta.4/go.mod h1:oeB+FyVzG3XrQJbJng0EnV8Vljfk9XvTIpGILNU/9Co=
+github.com/cosmos/cosmos-sdk v0.50.5 h1:MOEi+DKYgW67YaPgB+Pf+nHbD3V9S/ayitRKJYLfGIA=
+github.com/cosmos/cosmos-sdk v0.50.5/go.mod h1:oV/k6GJgXV9QPoM2fsYDPPsyPBgQbdotv532O6Mz1OQ=
+github.com/cosmos/go-bip39 v1.0.0 h1:pcomnQdrdH22njcAatO0yWojsUnCO3y2tNoV1cb6hHY=
+github.com/cosmos/go-bip39 v1.0.0/go.mod h1:RNJv0H/pOIVgxw6KS7QeX2a0Uo0aKUlfhZ4xuwvCdJw=
+github.com/cosmos/gogogateway v1.2.0 h1:Ae/OivNhp8DqBi/sh2A8a1D0y638GpL3tkmLQAiKxTE=
+github.com/cosmos/gogogateway v1.2.0/go.mod h1:iQpLkGWxYcnCdz5iAdLcRBSw3h7NXeOkZ4GUkT+tbFI=
+github.com/cosmos/gogoproto v1.4.2/go.mod h1:cLxOsn1ljAHSV527CHOtaIP91kK6cCrZETRBrkzItWU=
+github.com/cosmos/gogoproto v1.4.12 h1:vB6Lbe/rtnYGjQuFxkPiPYiCybqFT8QvLipDZP8JpFE=
+github.com/cosmos/gogoproto v1.4.12/go.mod h1:LnZob1bXRdUoqMMtwYlcR3wjiElmlC+FkjaZRv1/eLY=
+github.com/cosmos/gorocksdb v1.2.0 h1:d0l3jJG8M4hBouIZq0mDUHZ+zjOx044J3nGRskwTb4Y=
+github.com/cosmos/gorocksdb v1.2.0/go.mod h1:aaKvKItm514hKfNJpUJXnnOWeBnk2GL4+Qw9NHizILw=
+github.com/cosmos/iavl v1.0.1 h1:D+mYbcRO2wptYzOM1Hxl9cpmmHU1ZEt9T2Wv5nZTeUw=
+github.com/cosmos/iavl v1.0.1/go.mod h1:8xIUkgVvwvVrBu81scdPty+/Dx9GqwHnAvXz4cwF7RY=
+github.com/cosmos/ics23/go v0.10.0 h1:iXqLLgp2Lp+EdpIuwXTYIQU+AiHj9mOC2X9ab++bZDM=
+github.com/cosmos/ics23/go v0.10.0/go.mod h1:ZfJSmng/TBNTBkFemHHHj5YY7VAU/MBU980F4VU1NG0=
+github.com/cosmos/ledger-cosmos-go v0.13.3 h1:7ehuBGuyIytsXbd4MP43mLeoN2LTOEnk5nvue4rK+yM=
+github.com/cosmos/ledger-cosmos-go v0.13.3/go.mod h1:HENcEP+VtahZFw38HZ3+LS3Iv5XV6svsnkk9vdJtLr8=
+github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE=
 github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
 github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
 github.com/cpuguy83/go-md2man/v2 v2.0.3 h1:qMCsGGgs+MAzDFyp9LpAe1Lqy/fY/qCovCm0qnXZOBM=
 github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
 github.com/crackcomm/go-gitignore v0.0.0-20231225121904-e25f5bc08668 h1:ZFUue+PNxmHlu7pYv+IYMtqlaO/0VwaGEqKepZf9JpA=
 github.com/crackcomm/go-gitignore v0.0.0-20231225121904-e25f5bc08668/go.mod h1:p1d6YEZWvFzEh4KLyvBcVSnrfNDDvK2zfK/4x2v/4pE=
+github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
+github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
 github.com/cskr/pubsub v1.0.2 h1:vlOzMhl6PFn60gRlTQQsIfVwaPB/B/8MziK8FhEPt/0=
 github.com/cskr/pubsub v1.0.2/go.mod h1:/8MzYXk/NJAz782G8RPkFzXTZVu63VotefPnR9TIRis=
+github.com/danieljoos/wincred v1.2.0 h1:ozqKHaLK0W/ii4KVbbvluM91W2H3Sh0BncbUNPS7jLE=
+github.com/danieljoos/wincred v1.2.0/go.mod h1:FzQLLMKBFdvu+osBrnFODiv32YGwCfx0SkRa/eYHgec=
+github.com/davecgh/go-spew v0.0.0-20171005155431-ecdeabc65495/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
@@ -66,45 +233,91 @@ github.com/decred/dcrd/crypto/blake256 v1.0.1 h1:7PltbUIQB7u/FfZ39+DGa/ShuMyJ5il
 github.com/decred/dcrd/crypto/blake256 v1.0.1/go.mod h1:2OfgNZ5wDpcsFmHmCK5gZTPcCXqlm2ArzUIkw9czNJo=
 github.com/decred/dcrd/dcrec/secp256k1/v4 v4.2.0 h1:8UrgZ3GkP4i/CLijOJx79Yu+etlyjdBU4sfcs2WYQMs=
 github.com/decred/dcrd/dcrec/secp256k1/v4 v4.2.0/go.mod h1:v57UDF4pDQJcEfFUCRop3lJL149eHGSe9Jvczhzjo/0=
+github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f h1:U5y3Y5UE0w7amNe7Z5G/twsBW0KEalRQXZzf8ufSh9I=
+github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f/go.mod h1:xH/i4TFMt8koVQZ6WFms69WAsDWr2XsYL3Hkl7jkoLE=
 github.com/dgraph-io/badger v1.6.2 h1:mNw0qs90GVgGGWylh0umH5iag1j6n/PeJtNvL6KY/x8=
 github.com/dgraph-io/badger v1.6.2/go.mod h1:JW2yswe3V058sS0kZ2h/AXeDSqFjxnZcRrVH//y2UQE=
+github.com/dgraph-io/badger/v2 v2.2007.4 h1:TRWBQg8UrlUhaFdco01nO2uXwzKS7zd+HVdwV/GHc4o=
+github.com/dgraph-io/badger/v2 v2.2007.4/go.mod h1:vSw/ax2qojzbN6eXHIx6KPKtCSHJN/Uz0X0VPruTIhk=
 github.com/dgraph-io/badger/v3 v3.2011.1 h1:Hmyof0WMEF/QtutX5SQHzIMnJQxb/IrSzhjckV2SD6g=
 github.com/dgraph-io/badger/v3 v3.2011.1/go.mod h1:0rLLrQpKVQAL0or/lBLMQznhr6dWWX7h5AKnmnqx268=
+github.com/dgraph-io/ristretto v0.0.3-0.20200630154024-f66de99634de/go.mod h1:KPxhHT9ZxKefz+PCeOGsrHpl1qZ7i70dGTu2u+Ahh6E=
 github.com/dgraph-io/ristretto v0.1.1 h1:6CWw5tJNgpegArSHpNHJKldNeq03FQCwYvfMVWajOK8=
 github.com/dgraph-io/ristretto v0.1.1/go.mod h1:S1GPSBCYCIhmVNfcth17y2zZtQT6wzkzgwUve0VDWWA=
-github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2 h1:tdlZCpZ/P9DhczCTSixgIKmwPv6+wP5DGjqLYw5SUiA=
+github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
 github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
+github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 h1:fAjc9m62+UWV/WAFKLNi6ZS0675eEUC9y3AlwSbQu1Y=
+github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
+github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ=
+github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
 github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
 github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
 github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
+github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
 github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
 github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
 github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
+github.com/dvsekhvalnov/jose2go v1.6.0 h1:Y9gnSnP4qEI0+/uQkHvFXeD2PLPJeXEL+ySMEA2EjTY=
+github.com/dvsekhvalnov/jose2go v1.6.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU=
+github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs=
+github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU=
+github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I=
+github.com/edsrzf/mmap-go v1.0.0/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M=
 github.com/elastic/gosigar v0.12.0/go.mod h1:iXRIGg2tLnu7LBdpqzyQfGDEidKCfWcCMS0WKyPWoMs=
 github.com/elastic/gosigar v0.14.2 h1:Dg80n8cr90OZ7x+bAax/QjoW/XqTI11RmA79ZwIm9/4=
 github.com/elastic/gosigar v0.14.2/go.mod h1:iXRIGg2tLnu7LBdpqzyQfGDEidKCfWcCMS0WKyPWoMs=
+github.com/emicklei/dot v1.6.1 h1:ujpDlBkkwgWUY+qPId5IwapRW/xEoligRSYjioR6DFI=
+github.com/emicklei/dot v1.6.1/go.mod h1:DeV7GvQtIw4h2u73RKBkkFdvVAz0D9fzeJrgPW6gy/s=
+github.com/envoyproxy/go-control-plane v0.6.9/go.mod h1:SBwIajubJHhxtWwsL9s8ss4safvEdbitLhGGK48rN6g=
 github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
 github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
 github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
+github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
+github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0=
+github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE=
 github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
 github.com/evanphx/json-patch/v5 v5.9.0 h1:kcBlZQbplgElYIlo/n1hJbls2z/1awpXxpRi0/FOJfg=
 github.com/evanphx/json-patch/v5 v5.9.0/go.mod h1:VNkHZ/282BpEyt/tObQO8s5CMPmYYq14uClGH4abBuQ=
+github.com/facebookgo/ensure v0.0.0-20200202191622-63f1cf65ac4c h1:8ISkoahWXwZR41ois5lSJBSVw4D0OV19Ht/JSTzvSv0=
+github.com/facebookgo/ensure v0.0.0-20200202191622-63f1cf65ac4c/go.mod h1:Yg+htXGokKKdzcwhuNDwVvN+uBxDGXJ7G/VN1d8fa64=
+github.com/facebookgo/stack v0.0.0-20160209184415-751773369052 h1:JWuenKqqX8nojtoVVWjGfOF9635RETekkoH6Cc9SX0A=
+github.com/facebookgo/stack v0.0.0-20160209184415-751773369052/go.mod h1:UbMTZqLaRiH3MsBH8va0n7s1pQYcu3uTb8G4tygF4Zg=
+github.com/facebookgo/subset v0.0.0-20200203212716-c811ad88dec4 h1:7HZCaLC5+BZpmbhCOZJ293Lz68O7PYrF2EzeiFMwCLk=
+github.com/facebookgo/subset v0.0.0-20200203212716-c811ad88dec4/go.mod h1:5tD+neXqOorC30/tWg0LCSkrqj/AR6gu8yY8/fpw1q0=
+github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
+github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk=
+github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs=
+github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw=
+github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
+github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
 github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc=
-github.com/flynn/noise v1.0.1 h1:vPp/jdQLXC6ppsXSj/pM3W1BIJ5FEHE2TulSJBpb43Y=
-github.com/flynn/noise v1.0.1/go.mod h1:xbMo+0i6+IGbYdJhF31t2eR1BIU0CYc12+BNAKwUTag=
+github.com/flynn/noise v1.1.0 h1:KjPQoQCEFdZDiP03phOvGi11+SVVhBG2wOWAorLsstg=
+github.com/flynn/noise v1.1.0/go.mod h1:xbMo+0i6+IGbYdJhF31t2eR1BIU0CYc12+BNAKwUTag=
+github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw=
+github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g=
 github.com/francoispqt/gojay v1.2.13 h1:d2m3sFjloqoIUQU3TsHBgj6qg/BVGlTBeHDUmyJnXKk=
 github.com/francoispqt/gojay v1.2.13/go.mod h1:ehT5mTG4ua4581f1++1WLG0vPdaA9HaiDsoyrBGkyDY=
+github.com/franela/goblin v0.0.0-20200105215937-c9ffbefa60db/go.mod h1:7dvUGVsVBjqR7JHJk0brhHOZYGmfBYOrK0ZhYMEtBr4=
+github.com/franela/goreq v0.0.0-20171204163338-bcd34c9993f8/go.mod h1:ZhphrRTfi2rbfLwlschooIH4+wKKDR4Pdxhh+TRoA20=
 github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
 github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
 github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
 github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
+github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU=
 github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA=
 github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM=
 github.com/fxamacker/cbor/v2 v2.6.0 h1:sU6J2usfADwWlYDAFhZBQ6TnLFBHxgesMrQfQgk1tWA=
 github.com/fxamacker/cbor/v2 v2.6.0/go.mod h1:pxXPTn3joSm21Gbwsv0w9OSA2y1HFR9qXEeXQVeNoDQ=
-github.com/getkin/kin-openapi v0.123.0 h1:zIik0mRwFNLyvtXK274Q6ut+dPh6nlxBp0x7mNrPhs8=
-github.com/getkin/kin-openapi v0.123.0/go.mod h1:wb1aSZA/iWmorQP9KTAS/phLj/t17B5jT7+fS8ed9NM=
+github.com/getkin/kin-openapi v0.124.0 h1:VSFNMB9C9rTKBnQ/fpyDU8ytMTr4dWI9QovSKj9kz/M=
+github.com/getkin/kin-openapi v0.124.0/go.mod h1:wb1aSZA/iWmorQP9KTAS/phLj/t17B5jT7+fS8ed9NM=
+github.com/getsentry/sentry-go v0.27.0 h1:Pv98CIbtB3LkMWmXi4Joa5OOcwbmnX88sF5qbK3r3Ps=
+github.com/getsentry/sentry-go v0.27.0/go.mod h1:lc76E2QywIyW8WuBnwl8Lc4bkmQH4+w1gwTf25trprY=
 github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
+github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
+github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
+github.com/gin-gonic/gin v1.6.3/go.mod h1:75u5sXoLsGZoRN5Sgbi1eraJ4GU3++wFwWzhwvtwp4M=
+github.com/gin-gonic/gin v1.8.1 h1:4+fr/el88TOO3ewCmQr8cx/CtZ/umlIRIs5M4NTNjf8=
+github.com/gin-gonic/gin v1.8.1/go.mod h1:ji8BvRH1azfM+SYow9zQ6SZMvR8qOMZHmsCuWR9tTTk=
 github.com/gliderlabs/ssh v0.1.1/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0=
 github.com/go-chi/chi/v5 v5.0.12 h1:9euLV5sTrTNTRUU9POmDUvfxyj6LAABLUcEWO+JJb4s=
 github.com/go-chi/chi/v5 v5.0.12/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
@@ -113,6 +326,22 @@ github.com/go-chi/cors v1.2.1/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vz
 github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q=
 github.com/go-errors/errors v1.5.1 h1:ZwEMSLRCapFLflTpT7NKaAc7ukJ8ZPEjzlxt8rPN8bk=
 github.com/go-errors/errors v1.5.1/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og=
+github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
+github.com/go-jose/go-jose/v3 v3.0.3 h1:fFKWeig/irsp7XD2zBxvnmA/XaRWp5V3CBsZXJF7G7k=
+github.com/go-jose/go-jose/v3 v3.0.3/go.mod h1:5b+7YgP7ZICgJDBdfjZaIt+H/9L9T/YQrVfLAMboGkQ=
+github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
+github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
+github.com/go-kit/kit v0.10.0/go.mod h1:xUsJbQ/Fp4kEt7AFgCuvyX4a71u8h9jB8tj/ORgOZ7o=
+github.com/go-kit/kit v0.12.0 h1:e4o3o3IsBfAKQh5Qbbiqyfu97Ku7jrO/JbohvztANh4=
+github.com/go-kit/kit v0.12.0/go.mod h1:lHd+EkCZPIwYItmGDDRdhinkzX2A1sj+M9biaEaizzs=
+github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY=
+github.com/go-kit/log v0.2.1 h1:MRVx0/zhvdseW+Gza6N9rVzU/IVzaeE1SFI4raAhmBU=
+github.com/go-kit/log v0.2.1/go.mod h1:NwTd00d/i8cPZ3xOwwiv2PO5MOcx78fFErGNcVmBjv0=
+github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
+github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
+github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A=
+github.com/go-logfmt/logfmt v0.6.0 h1:wGYYu3uicYdqXVgoYbvnkrPVXkuLM1p1ifugDMEdRi4=
+github.com/go-logfmt/logfmt v0.6.0/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs=
 github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
 github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
 github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
@@ -122,34 +351,72 @@ github.com/go-openapi/jsonpointer v0.20.2 h1:mQc3nmndL8ZBzStEo3JYF8wzmeWffDH4VbX
 github.com/go-openapi/jsonpointer v0.20.2/go.mod h1:bHen+N0u1KEO3YlmqOjTT9Adn1RfD91Ar825/PuiRVs=
 github.com/go-openapi/swag v0.22.8 h1:/9RjDSQ0vbFR+NyjGMkFTsA1IA0fmhKSThmfGZjicbw=
 github.com/go-openapi/swag v0.22.8/go.mod h1:6QT22icPLEqAM/z/TChgb4WAveCHF92+2gF0CNjHpPI=
+github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
+github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8=
+github.com/go-playground/locales v0.14.0 h1:u50s323jtVGugKlcYeyzC0etD1HifMjqmJqb8WugfUU=
+github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs=
+github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA=
+github.com/go-playground/universal-translator v0.18.0 h1:82dyy6p4OuJq4/CByFNOn/jYrnRPArHwAcmLoJZxyho=
+github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA=
+github.com/go-playground/validator/v10 v10.2.0/go.mod h1:uOYAAleCW8F/7oMFd6aG0GOhaH6EGOAJShg8Id5JGkI=
+github.com/go-playground/validator/v10 v10.13.0 h1:cFRQdfaSMCOSfGCCLB20MHvuoHb/s5G8L5pu2ppK5AQ=
+github.com/go-playground/validator/v10 v10.13.0/go.mod h1:dwu7+CG8/CtBiJFZDz4e+5Upb6OLw04gtBYw0mcG/z4=
+github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
+github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
 github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE=
 github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI=
 github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls=
 github.com/go-test/deep v1.0.8 h1:TDsG77qcSprGbC6vTN8OuXp5g+J+b5Pcguhf7Zt61VM=
 github.com/go-test/deep v1.0.8/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE=
 github.com/go-yaml/yaml v2.1.0+incompatible/go.mod h1:w2MrLa16VYP0jy6N7M5kHaCkaLENm+P+Tv+MfurjSw0=
+github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo=
+github.com/gobwas/httphead v0.1.0 h1:exrUm0f4YX0L7EBwZHuCF4GDp8aJfVeBrlLQrs6NqWU=
+github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM=
+github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw=
+github.com/gobwas/pool v0.2.1 h1:xfeeEhW7pwmX8nuLVlqbzVc7udMDrwetjEv+TZIz1og=
+github.com/gobwas/pool v0.2.1/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw=
+github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM=
+github.com/gobwas/ws v1.2.1 h1:F2aeBZrm2NDsc7vbovKrWSogd4wvfAxg0FQ89/iqOTk=
+github.com/gobwas/ws v1.2.1/go.mod h1:hRKAFb8wOxFROYNsT1bqfWnhX+b5MFeJM9r2ZSwg/KY=
+github.com/goccy/go-json v0.9.11 h1:/pAaQDLHEoCq/5FFmSKBswWmK6H0e8g4159Kc/X/nqk=
+github.com/goccy/go-json v0.9.11/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
+github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 h1:ZpnhV/YsD2/4cESfV5+Hoeu/iUR3ruzNvZ+yQfO03a0=
+github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4=
 github.com/godbus/dbus/v5 v5.0.3/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
 github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
 github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk=
 github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
-github.com/gofrs/uuid/v5 v5.0.0 h1:p544++a97kEL+svbcFbCQVM9KFu0Yo25UoISXGNNH9M=
-github.com/gofrs/uuid/v5 v5.0.0/go.mod h1:CDOjlDMVAtN56jqyRUZh58JT31Tiw7/oQyEXZV+9bD8=
+github.com/gofrs/uuid/v5 v5.1.0 h1:S5rqVKIigghZTCBKPCw0Y+bXkn26K3TB5mvQq2Ix8dk=
+github.com/gofrs/uuid/v5 v5.1.0/go.mod h1:CDOjlDMVAtN56jqyRUZh58JT31Tiw7/oQyEXZV+9bD8=
+github.com/gogo/googleapis v1.1.0/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFGgqEef3s=
+github.com/gogo/googleapis v1.4.1-0.20201022092350-68b0159b7869/go.mod h1:5YRNX2z1oM5gXdAkurHa942MDgEJyk02w4OecKY87+c=
+github.com/gogo/googleapis v1.4.1 h1:1Yx4Myt7BxzvUr5ldGSbwYiZG6t9wGBZ+8/fX3Wvtq0=
+github.com/gogo/googleapis v1.4.1/go.mod h1:2lpHqI5OcWCtVElxXnPt+s8oJvMpySlOyM6xDCrzib4=
 github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
+github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
+github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4=
 github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o=
 github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
 github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
 github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
 github.com/golang/glog v1.2.0 h1:uCdmnmatrKCgMBlM4rMuJZWOkPDqdbZPnrMXDY4gI68=
 github.com/golang/glog v1.2.0/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w=
+github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
+github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
 github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
 github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE=
 github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
 github.com/golang/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:tluoj9z5200jBnyusfRPU2LqT6J+DAorxEvtC7LHB+E=
 github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
 github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
+github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc=
+github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs=
 github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.3.0/go.mod h1:Qd/q+1AKNOZr9uGQzbzCmRO6sUih6GTPZv6a1/R87v0=
 github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
 github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
+github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk=
 github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
 github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
 github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
@@ -159,11 +426,17 @@ github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QD
 github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
 github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
 github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
-github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg=
-github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
+github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
+github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
+github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
+github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
+github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
 github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
 github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
 github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
+github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
+github.com/google/btree v1.1.2 h1:xf4v41cLI2Z6FxbKm+8Bu+m8ifhj15JuZ9sa0jZCMUU=
+github.com/google/btree v1.1.2/go.mod h1:qOPhT0dTNdNzV6Z/lhRX0YXUafgPLFUh+gZMl761Gm4=
 github.com/google/flatbuffers v2.0.6+incompatible h1:XHFReMv7nFFusa+CEokzWbzaYocKXI6C7hdU5Kgh9Lw=
 github.com/google/flatbuffers v2.0.6+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
 github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
@@ -174,17 +447,29 @@ github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
 github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
 github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
 github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
 github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ=
 github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
+github.com/google/gofuzz v0.0.0-20170612174753-24818f796faf/go.mod h1:HP5RmnzzSNb993RKQDq4+1A4ia9nllfqcQFTQJedwGI=
+github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
+github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
+github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
 github.com/google/gopacket v1.1.19 h1:ves8RnFZPGiFnTS0uPQStjwru6uO6h+nlr9j6fL7kF8=
 github.com/google/gopacket v1.1.19/go.mod h1:iJ8V8n6KS+z2U1A8pUwu8bW5SyEMkXJB8Yo/Vo+TKTo=
 github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
+github.com/google/orderedcode v0.0.1 h1:UzfcAexk9Vhv8+9pNOgRu41f16lHq725vPwnSeiG/Us=
+github.com/google/orderedcode v0.0.1/go.mod h1:iVyU4/qPKHY5h/wSd6rZZCDcLJNxiWO6dvsYES2Sb20=
 github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
-github.com/google/pprof v0.0.0-20231229205709-960ae82b1e42 h1:dHLYa5D8/Ta0aLR2XcPsrkpAgGeFs6thhMcQK0oQ0n8=
-github.com/google/pprof v0.0.0-20231229205709-960ae82b1e42/go.mod h1:czg5+yv1E0ZGTi6S6vVK1mke0fV+FaUhNGcd6VRS9Ik=
+github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
+github.com/google/pprof v0.0.0-20240207164012-fb44976bdcd5 h1:E/LAvt58di64hlYjx7AsNS6C/ysHWYo+2qPCZKTQhRo=
+github.com/google/pprof v0.0.0-20240207164012-fb44976bdcd5/go.mod h1:czg5+yv1E0ZGTi6S6vVK1mke0fV+FaUhNGcd6VRS9Ik=
 github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
+github.com/google/tink/go v1.7.0 h1:6Eox8zONGebBFcCBqkVmt60LaWZa6xg1cl/DwAh/J1w=
+github.com/google/tink/go v1.7.0/go.mod h1:GAUOd+QE3pgj9q8VKIGTCP33c/B7eb4NhxLcgTJZStM=
+github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
 github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
 github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
 github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
@@ -193,15 +478,62 @@ github.com/googleapis/gax-go/v2 v2.0.3/go.mod h1:LLvjysVCY1JZeum8Z6l8qUty8fiNwE0
 github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
 github.com/gopherjs/gopherjs v0.0.0-20190812055157-5d271430af9f h1:KMlcu9X58lhTA/KrfX8Bi1LQSO4pzoVjTiL3h4Jk+Zk=
 github.com/gopherjs/gopherjs v0.0.0-20190812055157-5d271430af9f/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
-github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
-github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
+github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
+github.com/gorilla/handlers v1.5.2 h1:cLTUSsNkgcwhgRqvCNmdbRWG0A3N4F+M2nWKdScwyEE=
+github.com/gorilla/handlers v1.5.2/go.mod h1:dX+xVpaxdSw+q0Qek8SSsl3dfMk3jNddUkMzo0GtH0w=
+github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
+github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
+github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
+github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
+github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
+github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
+github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY=
+github.com/gorilla/websocket v1.5.1/go.mod h1:x3kM2JMyaluk02fnUJpQuwD2dCS5NDG2ZHL0uE0tcaY=
 github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA=
+github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
+github.com/grpc-ecosystem/go-grpc-middleware v1.2.2/go.mod h1:EaizFBKfUKtMIF5iaDEhniwNedqGo9FuLFzppDr3uwI=
+github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 h1:UH//fgunKIs4JdUbpDl1VZCDaL56wXCB/5+wF6uHfaI=
+github.com/grpc-ecosystem/go-grpc-middleware v1.4.0/go.mod h1:g5qyo/la0ALbONm6Vbp88Yd8NsDy6rZz+RcrMPxvld8=
+github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
 github.com/grpc-ecosystem/grpc-gateway v1.5.0/go.mod h1:RSKVYQBd5MCa4OVpNdGskqpgL2+G+NZTnrVHpWWfpdw=
+github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
+github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4M0+kPpLofRdBo=
+github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.0 h1:Wqo399gCIufwto+VfwCSvsnfGpF/w5E9CNxSwbpD6No= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.0/go.mod h1:qmOFXW2epJhM0qSnUUYpldc7gVz2KMQwJ/QYCDIa7XU= +github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c h1:6rhixN/i8ZofjG1Y75iExal34USq5p+wiN1tpie8IrU= +github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c/go.mod h1:NMPJylDgVpX0MLRlPy15sqSwOFv/U1GZ2m21JhFfek0= +github.com/hashicorp/consul/api v1.3.0/go.mod h1:MmDNSzIMUjNpY/mQ398R4bk2FnqQLoPndWW5VkKPlCE= +github.com/hashicorp/consul/sdk v0.3.0/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-hclog v1.5.0 h1:bI2ocEMgcVlz55Oj1xZNBsVi900c7II+fWDyV9o+13c= +github.com/hashicorp/go-hclog v1.5.0/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= +github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-immutable-radix v1.3.1 h1:DKHmCUm2hRBK510BaiZlwvpD40f8bJFeZnpfm2KLowc= +github.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-metrics v0.5.2 h1:ErEYO2f//CjKsUDw4SmLzelsK6L3ZmOAR/4P9iS7ruY= +github.com/hashicorp/go-metrics v0.5.2/go.mod h1:KEjodfebIOuBYSAe/bHTm+HChmKSxAOXPBieMLYozDE= +github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= +github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-plugin v1.5.2 h1:aWv8eimFqWlsEiMrYZdPYl+FdHaBJSN4AWwGWfT1G2Y= +github.com/hashicorp/go-plugin v1.5.2/go.mod h1:w1sAEES3g3PuV/RzUrgow20W2uErMly84hhD3um1WL4= +github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= +github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= +github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= +github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= +github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.1 h1:fv1ep09latC32wFoVwnqcnKJGnMSdBanPczbHAYm1BE= +github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v1.0.2 h1:dV3g9Z/unq5DpblPpw+Oqcv4dU/1omnb4Ok8iPY6p1c= github.com/hashicorp/golang-lru v1.0.2/go.mod 
h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= github.com/hashicorp/golang-lru/arc/v2 v2.0.5 h1:l2zaLDubNhW4XO3LnliVj0GXO3+/CGNJAg1dcN2Fpfw= @@ -210,21 +542,53 @@ github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= +github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= +github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= +github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= +github.com/hashicorp/yamux v0.1.1 h1:yrQxtgseBDrq9Y652vSRDvsKCJKOUD+GzTS4Y0Y8pvE= +github.com/hashicorp/yamux v0.1.1/go.mod h1:CtWFDAQgb7dxtzFs4tWbplKIe2jSi3+5vKbgIO0SLnQ= +github.com/hdevalence/ed25519consensus v0.1.0 h1:jtBwzzcHuTmFrQN6xQZn6CQEO/V9f7HsjsjeEZ6auqU= +github.com/hdevalence/ed25519consensus v0.1.0/go.mod h1:w3BHWjwJbFU29IRHL1Iqkw3sus+7FctEyM4RqDxYNzo= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/hsanjuan/ipfs-lite v1.8.1 h1:Rpd9bTXYgkmnt8M5QsZnWwtW6ebxAB7HlU/d0zE4BmA= github.com/hsanjuan/ipfs-lite v1.8.1/go.mod h1:oGCaHBi+I73UFjc6wPAQ75hr4FjJhoqy6YPZjtghDIc= +github.com/huandu/go-assert v1.1.5 h1:fjemmA7sSfYHJD7CUqs9qTwwfdNAx7/j2/ZlHXzNB3c= +github.com/huandu/go-assert v1.1.5/go.mod h1:yOLvuqZwmcHIC5rIzrBhT7D3Q9c3GFnd0JrPVhn/06U= +github.com/huandu/skiplist v1.2.0 h1:gox56QD77HzSC0w+Ws3MH3iie755GBJU1OER3h5VsYw= +github.com/huandu/skiplist v1.2.0/go.mod h1:7v3iFjLcSAzO4fN5B8dvebvo/qsfumiLiDXMrPiHF9w= +github.com/hudl/fargo v1.3.0/go.mod h1:y3CKSmjA+wD2gak7sUSXTAoopbhU08POFhmITJgmKTg= github.com/huin/goupnp v1.3.0 h1:UvLUlWDNpoUdYzb2TCn+MuTWtcjXKSza2n6CBdQ0xXc= github.com/huin/goupnp v1.3.0/go.mod h1:gnGPsThkYa7bFi/KWmEysQRf48l2dvR5bxr2OFckNX8= +github.com/hyperledger/aries-framework-go v0.3.2 h1:GsSUaSEW82cr5X8b3Qf90GAi37kmTKHqpPJLhar13X8= +github.com/hyperledger/aries-framework-go v0.3.2/go.mod h1:SorUysWEBw+uyXhY5RAtg2iyNkWTIIPM8+Slkt1Spno= +github.com/hyperledger/aries-framework-go/component/kmscrypto v0.0.0-20230427134832-0c9969493bd3 h1:PCbDSujjQ6oTEnAHgtThNmbS7SPAYEDBlKOnZFE+Ujw= +github.com/hyperledger/aries-framework-go/component/kmscrypto v0.0.0-20230427134832-0c9969493bd3/go.mod h1:aEk0vHBmZsAdDfXaI12Kg5ipZGiB3qNqgbPt/e/Hm2s= +github.com/hyperledger/aries-framework-go/component/log v0.0.0-20230427134832-0c9969493bd3 h1:x5qFQraTX86z9GCwF28IxfnPm6QH5YgHaX+4x97Jwvw= +github.com/hyperledger/aries-framework-go/component/log v0.0.0-20230427134832-0c9969493bd3/go.mod h1:CvYs4l8X2NrrF93weLOu5RTOIJeVdoZITtjEflyuTyM= +github.com/hyperledger/aries-framework-go/component/models v0.0.0-20230501135648-a9a7ad029347 h1:oPGUCpmnm7yxsVllcMQnHF3uc3hy4jfrSCh7nvzXA00= +github.com/hyperledger/aries-framework-go/component/models v0.0.0-20230501135648-a9a7ad029347/go.mod h1:nF8fHsYY+GZl74AFAQaKAhYWOOSaLVzW/TZ0Sq/6axI= +github.com/hyperledger/aries-framework-go/component/storageutil v0.0.0-20230427134832-0c9969493bd3 h1:JGYA9l5zTlvsvfnXT9hYPpCokAjmVKX0/r7njba7OX4= +github.com/hyperledger/aries-framework-go/component/storageutil v0.0.0-20230427134832-0c9969493bd3/go.mod h1:aSG2dWjYVzu2PVBtOqsYghaChA5+UUXnBbL+MfVceYQ= +github.com/hyperledger/aries-framework-go/spi 
v0.0.0-20230427134832-0c9969493bd3 h1:ytWmOQZIYQfVJ4msFvrqlp6d+ZLhT43wS8rgE2m+J1A= +github.com/hyperledger/aries-framework-go/spi v0.0.0-20230427134832-0c9969493bd3/go.mod h1:oryUyWb23l/a3tAP9KW+GBbfcfqp9tZD4y5hSkFrkqI= +github.com/hyperledger/ursa-wrapper-go v0.3.1 h1:Do+QrVNniY77YK2jTIcyWqj9rm/Yb5SScN0bqCjiibA= +github.com/hyperledger/ursa-wrapper-go v0.3.1/go.mod h1:nPSAuMasIzSVciQo22PedBk4Opph6bJ6ia3ms7BH/mk= github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI= github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/improbable-eng/grpc-web v0.15.0 h1:BN+7z6uNXZ1tQGcNAuaU1YjsLTApzkjt2tzCixLaUPQ= +github.com/improbable-eng/grpc-web v0.15.0/go.mod h1:1sy9HKV4Jt9aEs9JSnkWlRJPuPtwNr0l57L4f878wP8= +github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/influxdata/influxdb1-client v0.0.0-20191209144304-8bf82d3c094d/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo= github.com/invopop/yaml v0.2.0 h1:7zky/qH+O0DwAyoobXUqvVBwgBFRxKoQ/3FjcVpjTMY= github.com/invopop/yaml v0.2.0/go.mod h1:2XuRLgs/ouIrW3XNzuNj7J3Nvu/Dig5MXvbCEdiBN3Q= github.com/ipfs/bbloom v0.0.4 h1:Gi+8EGJ2y5qiD5FbsbpX/TMNcJw8gSqr7eyjHa4Fhvs= github.com/ipfs/bbloom v0.0.4/go.mod h1:cS9YprKXpoZ9lT0n/Mw/a6/aFV6DTjTLYHeA+gyqMG0= -github.com/ipfs/boxo v0.18.0 h1:MOL9/AgoV3e7jlVMInicaSdbgralfqSsbkc31dZ9tmw= -github.com/ipfs/boxo v0.18.0/go.mod h1:pIZgTWdm3k3pLF9Uq6MB8JEcW07UDwNJjlXW1HELW80= +github.com/ipfs/boxo v0.19.0 h1:UbX9FBJQF19ACLqRZOgdEla6jR/sC4H1O+iGE0NToXA= +github.com/ipfs/boxo v0.19.0/go.mod h1:V5gJzbIMwKEXrg3IdvAxIdF7UPgU4RsXmNGS8MQ/0D4= github.com/ipfs/go-bitfield v1.1.0 h1:fh7FIo8bSwaJEh6DdTWbCeZ1eqOaOkKFI74SCnsWbGA= github.com/ipfs/go-bitfield v1.1.0/go.mod h1:paqf1wjq/D2BBmzfTVFlJQ9IlFOZpg422HL0HqsGWHU= github.com/ipfs/go-block-format v0.2.0 h1:ZqrkxBA2ICbDRbK8KJs/u0O3dlp6gmAuuXUJNiW1Ycs= @@ -276,21 +640,48 @@ github.com/jbenet/go-temp-err-catcher v0.1.0/go.mod h1:0kJRvmDZXNMIiJirNPEYfhpPw github.com/jbenet/goprocess v0.1.4 h1:DRGOFReOMqqDNXwW70QkacFW0YN9QnwLV0Vqk+3oU0o= github.com/jbenet/goprocess v0.1.4/go.mod h1:5yspPrukOVuOLORacaBi858NqyClJPQxYZlqdZVfqY4= github.com/jellevandenhooff/dkim v0.0.0-20150330215556-f50fe3d243e1/go.mod h1:E0B/fFc00Y+Rasa88328GlI/XbtyysCtTHZS8h7IrBU= +github.com/jessevdk/go-flags v0.0.0-20141203071132-1679536dcc89/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= +github.com/jhump/protoreflect v1.15.3 h1:6SFRuqU45u9hIZPJAoZ8c28T3nK64BNdp9w6jFonzls= +github.com/jhump/protoreflect v1.15.3/go.mod h1:4ORHmSBmlCW8fh3xHmJMGyul1zNqZK4Elxc8qKP+p1k= +github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= +github.com/jmhodges/levigo v1.0.0 h1:q5EC36kV79HWeTBWsod3mG11EgStG3qArTKcvlksN1U= +github.com/jmhodges/levigo v1.0.0/go.mod h1:Q6Qx+uH3RAqyK4rFQroq9RL7mdkABMcfhEI+nNuzMJQ= +github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/jpillora/backoff v1.0.0/go.mod 
h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= +github.com/jrick/logrotate v1.0.0/go.mod h1:LNinyqDIJnpAur+b8yyulnQw/wDuN1+BYKlTRt3OuAQ= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= +github.com/kilic/bls12-381 v0.1.1-0.20210503002446-7b7597926c69 h1:kMJlf8z8wUcpyI+FQJIdGjAhfTww1y0AbQEv86bpVQI= +github.com/kilic/bls12-381 v0.1.1-0.20210503002446-7b7597926c69/go.mod h1:tlkavyke+Ac7h8R3gZIjI5LKBcvMlSWnXNMgT3vZXo8= +github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4= -github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= -github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc= -github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= +github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6CZQHDETBtE9HaSEkGmuNXF86RwHhHUvq4= +github.com/klauspost/compress v1.10.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.11.7/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.12.3/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= +github.com/klauspost/compress v1.17.7 h1:ehO88t2UGzQK66LMdE8tibEd1ErmzZjNEqWkjLAKQQg= +github.com/klauspost/compress v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= +github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuVLDM= +github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/koron/go-ssdp v0.0.4 h1:1IDwrghSKYM7yLf7XCzbByg2sJ/JcNOZRXS2jczTwz0= github.com/koron/go-ssdp v0.0.4/go.mod h1:oDXq+E5IL5q0U8uSBcoAXzTzInwy5lEgC91HoKtbmZk= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod 
h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= @@ -302,14 +693,19 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/lens-vm/lens/host-go v0.0.0-20231127204031-8d858ed2926c h1:bG+mr4SqbYRU69L6CSvHDsKbRg5Q9vaN2T5g7qcrPdQ= github.com/lens-vm/lens/host-go v0.0.0-20231127204031-8d858ed2926c/go.mod h1:a4edl+KcOVk1Nj3EjG77htqg2/0Mmy3bSG0kl+FWVqQ= +github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= +github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w= +github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= +github.com/lib/pq v1.10.7 h1:p7ZhMD+KsSRozJr34udlUrhboJwWAgCg34+/ZZNvZZw= +github.com/lib/pq v1.10.7/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/libp2p/go-buffer-pool v0.1.0 h1:oK4mSFcQz7cTQIfqbe4MIj9gLW+mnanjyFtc6cdF0Y8= github.com/libp2p/go-buffer-pool v0.1.0/go.mod h1:N+vh8gMqimBzdKkSMVuydVDq+UV5QTWy5HSiZacSbPg= github.com/libp2p/go-cidranger v1.1.0 h1:ewPN8EZ0dd1LSnrtuwd4709PXVcITVeuwbag38yPW7c= github.com/libp2p/go-cidranger v1.1.0/go.mod h1:KWZTfSr+r9qEo9OkI9/SIEeAtw+NNoU0dXIXt15Okic= github.com/libp2p/go-flow-metrics v0.1.0 h1:0iPhMI8PskQwzh57jB9WxIuIOQ0r+15PChFGkx3Q3WM= github.com/libp2p/go-flow-metrics v0.1.0/go.mod h1:4Xi8MX8wj5aWNDAZttg6UPmc0ZrnFNsMtpsYUClFtro= -github.com/libp2p/go-libp2p v0.32.2 h1:s8GYN4YJzgUoyeYNPdW7JZeZ5Ee31iNaIBfGYMAY4FQ= -github.com/libp2p/go-libp2p v0.32.2/go.mod h1:E0LKe+diV/ZVJVnOJby8VC5xzHF0660osg71skcxJvk= +github.com/libp2p/go-libp2p v0.33.2 h1:vCdwnFxoGOXMKmaGHlDSnL4bM3fQeW8pgIa9DECnb40= +github.com/libp2p/go-libp2p v0.33.2/go.mod h1:zTeppLuCvUIkT118pFVzA8xzP/p2dJYOMApCkFh0Yww= github.com/libp2p/go-libp2p-asn-util v0.4.1 h1:xqL7++IKD9TBFMgnLPZR6/6iYhawHKHl950SO9L6n94= github.com/libp2p/go-libp2p-asn-util v0.4.1/go.mod h1:d/NI6XZ9qxw67b4e+NgpQexCIiFYJjErASrYW4PFDN8= github.com/libp2p/go-libp2p-gostream v0.6.0 h1:QfAiWeQRce6pqnYfmIVWJFXNdDyfiR/qkCnjyaZUPYU= @@ -338,24 +734,44 @@ github.com/libp2p/go-yamux/v4 v4.0.1 h1:FfDR4S1wj6Bw2Pqbc8Uz7pCxeRBPbwsBbEdfwiCy github.com/libp2p/go-yamux/v4 v4.0.1/go.mod h1:NWjl8ZTLOGlozrXSOZ/HlfG++39iKNnM5wwmtQP1YB4= github.com/libp2p/zeroconf/v2 v2.2.0 h1:Cup06Jv6u81HLhIj1KasuNM/RHHrJ8T7wOTS4+Tv53Q= github.com/libp2p/zeroconf/v2 v2.2.0/go.mod h1:fuJqLnUwZTshS3U/bMRJ3+ow/v9oid1n0DmyYyNO1Xs= +github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20190605223551-bc2310a04743/go.mod h1:qklhhLq1aX+mtWk9cPHPzaBjWImj5ULL6C7HFJtXQMM= +github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4= +github.com/linxGnu/grocksdb v1.8.12 h1:1/pCztQUOa3BX/1gR3jSZDoaKFpeHFvQ1XrqZpSvZVo= +github.com/linxGnu/grocksdb v1.8.12/go.mod h1:xZCIb5Muw+nhbDK4Y5UJuOrin5MceOuiXkVUR7vp4WY= +github.com/lmittmann/tint v1.0.4 h1:LeYihpJ9hyGvE0w+K2okPTGUdVLfng1+nDNVR4vWISc= +github.com/lmittmann/tint v1.0.4/go.mod h1:HIS3gSy7qNwGCj+5oRjAutErFBl4BzdQP6cJZ0NfMwE= github.com/lunixbochs/vtclean v1.0.0/go.mod h1:pHhQNgMf3btfWnGBVipUOjRYhoOsdGqdm/+2c2E2WMI= +github.com/lyft/protoc-gen-validate v0.0.13/go.mod h1:XbGvPuh87YZc5TdIa2/I4pLk0QoUACkjt2znoq26NVQ= +github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/magiconair/properties v1.8.7 
h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= github.com/mailru/easyjson v0.0.0-20190312143242-1de009706dbe/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/manifoldco/promptui v0.9.0 h1:3V4HzJk1TtXW1MTZMP7mdlwbBpIinw3HztaIlYthEiA= +github.com/manifoldco/promptui v0.9.0/go.mod h1:ka04sppxSGFAtxX0qhlYQjISsg9mR4GWtQEhdbn6Pgg= github.com/marten-seemann/tcp v0.0.0-20210406111302-dfbc87cc63fd h1:br0buuQ854V8u83wA0rVZ8ttrq5CpaPZdvrK0LP2lOk= github.com/marten-seemann/tcp v0.0.0-20210406111302-dfbc87cc63fd/go.mod h1:QuCEs1Nt24+FYQEqAAncTDPJIuGs+LxK1MCiFL25pMU= +github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= -github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0 h1:jWpvCLoY8Z/e3VKvlsiIGKtc+UG6U5vzxaoagmhXfyg= -github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0/go.mod h1:QUyp042oQthUoa9bqDv0ER0wrtXnBruoNd7aNjkbP+k= github.com/microcosm-cc/bluemonday v1.0.1/go.mod h1:hsXNsILzKxV+sX77C5b8FSuKF00vh2OMYv+xgHpAMF4= +github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= -github.com/miekg/dns v1.1.57 h1:Jzi7ApEIzwEPLHWRcafCN9LZSBbqQpxjt/wpgvg7wcM= -github.com/miekg/dns v1.1.57/go.mod h1:uqRjCRUuEAA6qsOiJvDd+CFo/vW+y5WR6SNmHE55hZk= +github.com/miekg/dns v1.1.58 h1:ca2Hdkz+cDg/7eNF6V56jjzuZ4aCAE+DbVkILdQWG/4= +github.com/miekg/dns v1.1.58/go.mod h1:Ypv+3b/KadlvW9vJfXOTf300O4UqaHFzFCuHz+rPkBY= github.com/mikioh/tcp v0.0.0-20190314235350-803a9b46060c h1:bzE/A84HN25pxAuk9Eej1Kz9OUelF97nAc82bDquQI8= github.com/mikioh/tcp v0.0.0-20190314235350-803a9b46060c/go.mod h1:0SQS9kMwD2VsyFEB++InYyBJroV/FRmBgcydeSUcJms= github.com/mikioh/tcpinfo v0.0.0-20190314235526-30a79bb1804b h1:z78hV3sbSMAUoyUMM0I83AUIT6Hu17AWfgjzIbtrYFc= @@ -363,28 +779,46 @@ github.com/mikioh/tcpinfo v0.0.0-20190314235526-30a79bb1804b/go.mod h1:lxPUiZwKo github.com/mikioh/tcpopt v0.0.0-20190314235656-172688c1accc 
h1:PTfri+PuQmWDqERdnNMiD9ZejrlswWrCpBEZgWOiTrc= github.com/mikioh/tcpopt v0.0.0-20190314235656-172688c1accc/go.mod h1:cGKTAVKx4SxOuR/czcZ/E2RSJ3sfHs8FpHhQ5CWMf9s= github.com/minio/blake2b-simd v0.0.0-20160723061019-3f5f724cb5b1/go.mod h1:pD8RvIylQ358TN4wwqatJ8rNavkEINozVn9DtGI3dfQ= +github.com/minio/highwayhash v1.0.2 h1:Aak5U0nElisjDCfPSG79Tgzkn2gl66NxOMspRrKnA/g= +github.com/minio/highwayhash v1.0.2/go.mod h1:BQskDq+xkJ12lmlUUi7U0M5Swg3EWR+dLTk+kldvVxY= github.com/minio/sha256-simd v0.1.1-0.20190913151208-6de447530771/go.mod h1:B5e1o+1/KgNmWrSQK08Y6Z1Vb5pwIktudl0J58iy0KM= github.com/minio/sha256-simd v1.0.1 h1:6kaan5IFmwTNynnKKpDHe6FWHohJOHhCPchzK49dzMM= github.com/minio/sha256-simd v1.0.1/go.mod h1:Pz6AKMiUdngCLpeTL/RJY1M9rUuPMYujV5xJjtbRSN8= +github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= +github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= +github.com/mitchellh/go-testing-interface v1.14.1 h1:jrgshOhYAUVNMAJiKbEu7EqAwgJJ2JqpQmpLJOu07cU= +github.com/mitchellh/go-testing-interface v1.14.1/go.mod h1:gfgS7OtZj6MA4U1UrDRp04twqAjfvlZyCfX3sDjEym8= +github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= +github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= +github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= github.com/mr-tron/base58 v1.1.2/go.mod h1:BinMc/sQntlIE1frQmRFPUoPA1Zkr8VRgBdjWI2mNwc= github.com/mr-tron/base58 v1.2.0 h1:T/HDJBh4ZCPbU39/+c3rRvE0uKBQlU27+QI8LJ4t64o= github.com/mr-tron/base58 v1.2.0/go.mod h1:BinMc/sQntlIE1frQmRFPUoPA1Zkr8VRgBdjWI2mNwc= +github.com/mtibben/percent v0.2.1 h1:5gssi8Nqo8QU/r2pynCm+hBQHpkB/uNK7BJCFogWdzs= +github.com/mtibben/percent v0.2.1/go.mod h1:KG9uO+SZkUp+VkRHsCdYQV3XSZrrSpR3O9ibNBTZrns= github.com/multiformats/go-base32 v0.1.0 h1:pVx9xoSPqEIQG8o+UbAe7DNi51oej1NtK+aGkbLYxPE= github.com/multiformats/go-base32 
v0.1.0/go.mod h1:Kj3tFY6zNr+ABYMqeUNeGvkIC/UYgtWibDcT0rExnbI= github.com/multiformats/go-base36 v0.2.0 h1:lFsAbNOGeKtuKozrtBsAkSVhv1p9D0/qedU9rQyccr0= github.com/multiformats/go-base36 v0.2.0/go.mod h1:qvnKE++v+2MWCfePClUEjE78Z7P2a1UV0xHgWc0hkp4= github.com/multiformats/go-multiaddr v0.1.1/go.mod h1:aMKBKNEYmzmDmxfX88/vz+J5IU55txyt0p4aiWVohjo= github.com/multiformats/go-multiaddr v0.2.0/go.mod h1:0nO36NvPpyV4QzvTLi/lafl2y95ncPj0vFwVF6k6wJ4= -github.com/multiformats/go-multiaddr v0.12.2 h1:9G9sTY/wCYajKa9lyfWPmpZAwe6oV+Wb1zcmMS1HG24= -github.com/multiformats/go-multiaddr v0.12.2/go.mod h1:GKyaTYjZRdcUhyOetrxTk9z0cW+jA/YrnqTOvKgi44M= +github.com/multiformats/go-multiaddr v0.12.3 h1:hVBXvPRcKG0w80VinQ23P5t7czWgg65BmIvQKjDydU8= +github.com/multiformats/go-multiaddr v0.12.3/go.mod h1:sBXrNzucqkFJhvKOiwwLyqamGa/P5EIXNPLovyhQCII= github.com/multiformats/go-multiaddr-dns v0.3.1 h1:QgQgR+LQVt3NPTjbrLLpsaT2ufAA2y0Mkk+QRVJbW3A= github.com/multiformats/go-multiaddr-dns v0.3.1/go.mod h1:G/245BRQ6FJGmryJCrOuTdB37AMA5AMOVuO6NY3JwTk= github.com/multiformats/go-multiaddr-fmt v0.1.0 h1:WLEFClPycPkp4fnIzoFoV9FVd49/eQsuaL3/CWe167E= @@ -401,80 +835,180 @@ github.com/multiformats/go-multistream v0.5.0/go.mod h1:n6tMZiwiP2wUsR8DgfDWw1dy github.com/multiformats/go-varint v0.0.1/go.mod h1:3Ls8CIEsrijN6+B7PbrXRPxHRPuXSrVKRY101jdMZYE= github.com/multiformats/go-varint v0.0.7 h1:sWSGR+f/eu5ABZA2ZpYKBILXTTs9JWpdEM/nEGOHFS8= github.com/multiformats/go-varint v0.0.7/go.mod h1:r8PUYw/fD/SjBCiKOoDlGF6QawOELpZAu9eioSos/OU= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f h1:KUppIJq7/+SVif2QVs3tOP0zanoHgBEVAwHxUSIzRqU= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/grpc-proxy v0.0.0-20181017164139-0f1106ef9c76/go.mod h1:x5OoJHDHqxHS801UIuhqGl6QdSAEJvtausosHSdazIo= +github.com/nats-io/jwt v0.3.0/go.mod h1:fRYCDE99xlTsqUzISS1Bi75UBJ6ljOJQOAAu5VglpSg= +github.com/nats-io/jwt v0.3.2/go.mod h1:/euKqTS1ZD+zzjYrY7pseZrTtWQSjujC7xjPc8wL6eU= +github.com/nats-io/nats-server/v2 v2.1.2/go.mod h1:Afk+wRZqkMQs/p45uXdrVLuab3gwv3Z8C4HTBu8GD/k= +github.com/nats-io/nats.go v1.9.1/go.mod h1:ZjDU1L/7fJ09jvUSRVBR2e7+RnLiiIQyqyzEE/Zbp4w= +github.com/nats-io/nkeys v0.1.0/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= +github.com/nats-io/nkeys v0.1.3/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= +github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= github.com/neelance/astrewrite v0.0.0-20160511093645-99348263ae86/go.mod h1:kHJEU3ofeGjhHklVoIGuVj85JJwZ6kWPaJwCIxgnFmo= github.com/neelance/sourcemap v0.0.0-20151028013722-8c68805598ab/go.mod h1:Qr6/a/Q4r9LP1IltGz7tA7iOK1WonHEYhu1HRBA7ZiM= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= +github.com/oasisprotocol/curve25519-voi v0.0.0-20230904125328-1f23a7beb09a h1:dlRvE5fWabOchtH7znfiFCcOvmIYgOeAS5ifBXBlh9Q= +github.com/oasisprotocol/curve25519-voi v0.0.0-20230904125328-1f23a7beb09a/go.mod h1:hVoHR2EVESiICEMbg137etN/Lx+lSrHPTD39Z/uE+2s= +github.com/oklog/oklog v0.3.2/go.mod 
h1:FCV+B7mhrz4o+ueLpx+KqkyXRGMWOYEvfiXtdGtbWGs= +github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= +github.com/oklog/run v1.1.0 h1:GEenZ1cK0+q0+wsJew9qUg/DyD8k3JzYsZAi5gYi2mA= +github.com/oklog/run v1.1.0/go.mod h1:sVPdnTZT1zYwAJeCMu2Th4T21pA3FPOQRfWjQlk7DVU= +github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.8.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= -github.com/onsi/ginkgo v1.14.0/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY= +github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= -github.com/onsi/ginkgo/v2 v2.13.2 h1:Bi2gGVkfn6gQcjNjZJVO8Gf0FHzMPf2phUei9tejVMs= -github.com/onsi/ginkgo/v2 v2.13.2/go.mod h1:XStQ8QcGwLyF4HdfcZB8SFOS/MWCgDuXMSBe6zrvLgM= +github.com/onsi/ginkgo/v2 v2.1.3/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c= +github.com/onsi/ginkgo/v2 v2.15.0 h1:79HwNRBAZHOEwrczrgSOPy+eFTTlIGELKy5as+ClttY= +github.com/onsi/ginkgo/v2 v2.15.0/go.mod h1:HlxMHtYF57y6Dpf+mc5529KKmSq9h2FpCF+/ZkwUxKM= +github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= -github.com/onsi/gomega v1.29.0 h1:KIA/t2t5UBzoirT4H9tsML45GEbo3ouUnBHsCfD2tVg= -github.com/onsi/gomega v1.29.0/go.mod h1:9sxs+SwGrKI0+PWe4Fxa9tFQQBG5xSsSbMXOI8PPpoQ= +github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= +github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro= +github.com/onsi/gomega v1.30.0 h1:hvMK7xYz4D3HapigLTeGdId/NcfQx1VHMJc60ew99+8= +github.com/onsi/gomega v1.30.0/go.mod h1:9sxs+SwGrKI0+PWe4Fxa9tFQQBG5xSsSbMXOI8PPpoQ= +github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.1.0-rc5 h1:Ygwkfw9bpDvs+c9E34SdgGOj41dX/cbdlwvlWt0pnFI= +github.com/opencontainers/image-spec v1.1.0-rc5/go.mod h1:X4pATf0uXsnn3g5aiGIsVnJBR4mxhKzfwmvK/B2NTm8= +github.com/opencontainers/runc v1.1.3 h1:vIXrkId+0/J2Ymu2m7VjGvbSlAId9XNRPhn2p4b+d8w= +github.com/opencontainers/runc v1.1.3/go.mod h1:1J5XiS+vdZ3wCyZybsuxXZWGrgSr8fFJHLXuG2PsnNg= github.com/opencontainers/runtime-spec v1.0.2/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= -github.com/opencontainers/runtime-spec v1.1.0 h1:HHUyrt9mwHUjtasSbXSMvs4cyFxh+Bll4AjJ9odEGpg= -github.com/opencontainers/runtime-spec v1.1.0/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= +github.com/opencontainers/runtime-spec v1.2.0 h1:z97+pHb3uELt/yiAWD691HNHQIF07bE7dzrbT927iTk= +github.com/opencontainers/runtime-spec v1.2.0/go.mod 
h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= +github.com/opentracing-contrib/go-observer v0.0.0-20170622124052-a52f23424492/go.mod h1:Ngi6UdF0k5OKD5t5wlmGhe/EDKPoUM3BXZSSfIuJbis= +github.com/opentracing/basictracer-go v1.0.0/go.mod h1:QfBfYuafItcjQuMwinw9GhYKwFXS9KnPs5lxoYwgW74= +github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs= github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= +github.com/openzipkin-contrib/zipkin-go-opentracing v0.4.5/go.mod h1:/wsWhb9smxSfWAKL3wpBW7V8scJMt8N8gnaMCS9E/cA= github.com/openzipkin/zipkin-go v0.1.1/go.mod h1:NtoC/o8u3JlF1lSlyPNswIbeQH9bJTmOf0Erfk+hxe8= +github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw= +github.com/openzipkin/zipkin-go v0.2.1/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4= +github.com/openzipkin/zipkin-go v0.2.2/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4= +github.com/ory/dockertest v3.3.5+incompatible h1:iLLK6SQwIhcbrG783Dghaaa3WPzGc+4Emza6EbVUUGA= +github.com/ory/dockertest v3.3.5+incompatible/go.mod h1:1vX4m9wsvi00u5bseYwXaSnhNrne+V0E6LAcBILJdPs= +github.com/pact-foundation/pact-go v1.0.4/go.mod h1:uExwJY4kCzNPcHRj+hCR/HBbOOIwwtUjcrb0b5/5kLM= +github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pascaldekloe/goe v0.1.0 h1:cBOtyMzM9HTpWjXfbbunk26uA6nG3a8n06Wieeh0MwY= +github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 h1:onHthvaw9LFnH4t2DcNVpwGmV9E1BkGknEliJkfwQj0= github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58/go.mod h1:DXv8WO4yhMYhSNPKjeNKa5WY9YCIEBRbNzFFPJbWO6Y= +github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= +github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4= github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= +github.com/performancecopilot/speed v3.0.0+incompatible/go.mod h1:/CLtqpZ5gBg1M9iaPbIdPPGyKcA8hKdoy6hAWba7Yac= github.com/perimeterx/marshmallow v1.1.5 h1:a2LALqQ1BlHM8PZblsDdidgv1mWi1DgC2UmX50IvK2s= github.com/perimeterx/marshmallow v1.1.5/go.mod h1:dsXbUu8CRzfYP5a87xpp0xq9S3u0Vchtcl8we9tYaXw= +github.com/petermattis/goid v0.0.0-20180202154549-b0b1615b78e5/go.mod h1:jvVRKCrJTQWu0XVbaOlby/2lO20uSCHEMzzplHXte1o= +github.com/petermattis/goid v0.0.0-20230904192822-1876fd5063bc h1:8bQZVK1X6BJR/6nYUPxQEP+ReTsceJTKizeuwjWOPUA= +github.com/petermattis/goid v0.0.0-20230904192822-1876fd5063bc/go.mod h1:pxMtw7cyUw6B2bRH0ZBANSPg+AoSud1I1iyJHI69jH4= +github.com/pierrec/lz4 v1.0.2-0.20190131084431-473cd7ce01a1/go.mod h1:3/3N9NVKO0jef7pBehbT1qWhCMrIgbYNnFAZCqQ5LRc= +github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= +github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4= +github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8= +github.com/piprate/json-gold v0.5.0 h1:RmGh1PYboCFcchVFuh2pbSWAZy4XJaqTMU4KQYsApbM= +github.com/piprate/json-gold v0.5.0/go.mod 
h1:WZ501QQMbZZ+3pXFPhQKzNwS1+jls0oqov3uQ2WasLs= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/profile v1.2.1/go.mod h1:hJw3o1OdXxsrSjjVksARp5W95eeEaEfptyVZyv6JUPA= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/polydawn/refmt v0.89.0 h1:ADJTApkvkeBZsN0tBTx8QjpD9JkmxbKp0cxfr9qszm4= github.com/polydawn/refmt v0.89.0/go.mod h1:/zvteZs/GwLtCgZ4BL6CBsk9IKIlexP43ObX9AxTqTw= +github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= +github.com/pquerna/cachecontrol v0.1.0 h1:yJMy84ti9h/+OEWa752kBTKv4XC30OtVVHYv/8cTqKc= +github.com/pquerna/cachecontrol v0.1.0/go.mod h1:NrUG3Z7Rdu85UNR3vm7SOsl1nFIeSiQnrHV5K9mBcUI= github.com/prometheus/client_golang v0.8.0/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.3.0/go.mod h1:hJaj2vgQTGQmVCsAACORcieXFeDPbaTKGT+JTgUa3og= +github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= +github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= github.com/prometheus/client_golang v1.18.0 h1:HzFfmkOzH5Q8L8G+kSJKUx5dtG87sewO+FoDDqP5Tbk= github.com/prometheus/client_golang v1.18.0/go.mod h1:T+GXkCk5wSJyOqMIzVgvvjFDlkOQntgjkJWKrN5txjA= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw= -github.com/prometheus/client_model v0.5.0/go.mod h1:dTiFglRmd66nLR9Pv9f0mZi7B7fk5Pm3gvsjB5tr+kI= +github.com/prometheus/client_model v0.1.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.6.0 h1:k1v3CzpSRUTrKMppY35TLwPvxHqBu0bYgxZzqGIgaos= +github.com/prometheus/client_model v0.6.0/go.mod h1:NTQHnmxFpouOD0DpvP4XujX3CdOAGQPoaGhyTchlyt8= github.com/prometheus/common v0.0.0-20180801064454-c7de2306084e/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= -github.com/prometheus/common v0.45.0 h1:2BGz0eBc2hdMDLnO/8n0jeB3oPrt2D08CekT0lneoxM= 
-github.com/prometheus/common v0.45.0/go.mod h1:YJmSTw9BoKxJplESWWxlbyttQR4uaEcGyv9MZjVOJsY= +github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.7.0/go.mod h1:DjGbpBbp5NYNiECxcL/VnbXCCaQpKd3tt26CguLLsqA= +github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= +github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= +github.com/prometheus/common v0.15.0/go.mod h1:U+gB1OBLb1lF3O42bTCL+FK18tX9Oar16Clt/msog/s= +github.com/prometheus/common v0.47.0 h1:p5Cz0FNHo7SnWOmWmoRozVcjEp0bIVU8cV7OShpjL1k= +github.com/prometheus/common v0.47.0/go.mod h1:0/KsvlIEfPQCQ5I2iNSAWKPZziNCvRs5EC6ILDTlAPc= github.com/prometheus/procfs v0.0.0-20180725123919-05ee40e3a273/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= +github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/procfs v0.3.0/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo= github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= github.com/quic-go/qpack v0.4.0 h1:Cr9BXA1sQS2SmDUWjSofMPNKmvF6IiIfDRmgU0w1ZCo= github.com/quic-go/qpack v0.4.0/go.mod h1:UZVnYIfi5GRk+zI9UMaCPsmZ2xKJP7XBUvVyT1Knj9A= -github.com/quic-go/qtls-go1-20 v0.4.1 h1:D33340mCNDAIKBqXuAvexTNMUByrYmFYVfKfDN5nfFs= -github.com/quic-go/qtls-go1-20 v0.4.1/go.mod h1:X9Nh97ZL80Z+bX/gUXMbipO6OxdiDi58b/fMC9mAL+k= -github.com/quic-go/quic-go v0.40.1 h1:X3AGzUNFs0jVuO3esAGnTfvdgvL4fq655WaOi1snv1Q= -github.com/quic-go/quic-go v0.40.1/go.mod h1:PeN7kuVJ4xZbxSv/4OX6S1USOX8MJvydwpTx31vx60c= +github.com/quic-go/quic-go v0.42.0 h1:uSfdap0eveIl8KXnipv9K7nlwZ5IqLlYOpJ58u5utpM= +github.com/quic-go/quic-go v0.42.0/go.mod h1:132kz4kL3F9vxhW3CtQJLDVwcFe5wdWeJXXijhsO57M= github.com/quic-go/webtransport-go v0.6.0 h1:CvNsKqc4W2HljHJnoT+rMmbRJybShZ0YPFDD3NxaZLY= github.com/quic-go/webtransport-go v0.6.0/go.mod h1:9KjU4AEBqEQidGHNDkZrb8CAa1abRaosM2yGOyiikEc= github.com/raulk/go-watchdog v1.3.0 h1:oUmdlHxdkXRJlwfG0O9omj8ukerm8MEQavSiDTEtBsk= github.com/raulk/go-watchdog v1.3.0/go.mod h1:fIvOnLbF0b0ZwkB9YU4mOW9Did//4vPZtDqv66NfsMU= +github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= +github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 h1:N/ElC8H3+5XpJzTSTfLsJV/mx9Q9g7kxmchpfZyxgzM= +github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= +github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= +github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= 
github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= +github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= +github.com/rs/cors v1.10.1 h1:L0uuZVXIKlI1SShY2nhFfo44TYvDPQ1w4oFkUJNfhyo= +github.com/rs/cors v1.10.1/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU= +github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= +github.com/rs/zerolog v1.32.0 h1:keLypqrlIjaFsbmJOBdB/qvyF8KEtCWHwobLp5l/mQ0= +github.com/rs/zerolog v1.32.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6keLGt6kNQ= github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4= github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE= github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ= github.com/samber/lo v1.39.0 h1:4gTz1wUhNYLhFSKl6O+8peW0v2F4BCY034GRpU9WnuA= github.com/samber/lo v1.39.0/go.mod h1:+m/ZKRl6ClXCE2Lgf3MsQlWfh4bn1bz6CXEOxnEXnEA= +github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da/go.mod h1:gi+0XIa01GRL2eRQVjQkKGqKF3SF9vZR/HnPullcV2E= +github.com/sasha-s/go-deadlock v0.3.1 h1:sqv7fDNShgjcaxkO0JNcOAlr8B9+cV5Ey/OB71efZx0= +github.com/sasha-s/go-deadlock v0.3.1/go.mod h1:F73l+cr82YSh10GxyRI6qZiCgK64VaZjwesgfQ1/iLM= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= github.com/shurcooL/component v0.0.0-20170202220835-f88ec8f54cc4/go.mod h1:XhFIlyj5a1fBNx5aJTbKoIq0mNaPvOagO+HjB3EtxrY= github.com/shurcooL/events v0.0.0-20181021180414-410e4ca65f48/go.mod h1:5u70Mqkb5O5cxEA8nxTsgrgLehJeAw6Oc4Ab1c/P1HM= @@ -499,36 +1033,66 @@ github.com/shurcooL/sanitized_anchor_name v0.0.0-20170918181015-86672fcb3f95/go. 
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/shurcooL/users v0.0.0-20180125191416-49c67e49c537/go.mod h1:QJTqeLYEDaXHZDBsXlPCDqdhQuJkuw4NOtaxYe3xii4= github.com/shurcooL/webdavfs v0.0.0-20170829043945-18c3829fa133/go.mod h1:hKmq5kWdCj2z2KEozexVbfEZIWiTjhE0+UjmZgPqehw= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= github.com/smartystreets/assertions v1.2.0 h1:42S6lae5dvLc7BrLu/0ugRtcFVjoJNMC/N3yZFZkDFs= github.com/smartystreets/assertions v1.2.0/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo= +github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/smartystreets/goconvey v1.7.2 h1:9RBaZCeXEQ3UselpuwUQHltGVXvdwm6cv1hgR6gDIPg= github.com/smartystreets/goconvey v1.7.2/go.mod h1:Vw0tHAZW6lzCRk3xgdin6fKYcG+G3Pg9vgXWeJpQFMM= +github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= +github.com/sony/gobreaker v0.4.1/go.mod h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJOjmxWY= github.com/sourcegraph/annotate v0.0.0-20160123013949-f4cad6c6324d/go.mod h1:UdhH50NIW0fCiwBSr0co2m7BnFLdv4fQTgdqdJTHFeE= github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo= github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0= github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e/go.mod h1:HuIsMU8RRBOtsCgI77wP899iHVBQpCmg4ErYMZB+2IA= github.com/sourcenetwork/badger/v4 v4.2.1-0.20231113215945-a63444ca5276 h1:TpQDDPfucDgCNH0NVqVUk6SSq6T6G8p9HIocmwZh9Tg= github.com/sourcenetwork/badger/v4 v4.2.1-0.20231113215945-a63444ca5276/go.mod h1:lxiZTDBw0vheFMqSwX2OvB6RTDI1+/UtVCSU4rpThFM= +github.com/sourcenetwork/corelog v0.0.7 h1:vztssVAUDcsYN5VUOW3PKYhLprHfzoc8UbKewQuD1qw= +github.com/sourcenetwork/corelog v0.0.7/go.mod h1:cMabHgs3kARgYTQeQYSOmaGGP8XMU6sZrHd8LFrL3zA= github.com/sourcenetwork/go-libp2p-pubsub-rpc v0.0.13 h1:d/PeGZutd5NcDr6ltAv8ubN5PxsHMp1YUnhHY/QCWB4= github.com/sourcenetwork/go-libp2p-pubsub-rpc v0.0.13/go.mod h1:jUoQv592uUX1u7QBjAY4C+l24X9ArhPfifOqXpDHz4U= github.com/sourcenetwork/graphql-go v0.7.10-0.20231113214537-a9560c1898dd h1:lmpW39/8wPJ0khWRhOcj7Bj0HYKbSmQ8rXMJw1cMB8U= github.com/sourcenetwork/graphql-go v0.7.10-0.20231113214537-a9560c1898dd/go.mod h1:rkahXkgRH/3vZErN1Bx+qt1+w+CV5fgaJyKKWgISe4U= github.com/sourcenetwork/immutable v0.3.0 h1:gHPtGvLrTBTK5YpDAhMU+u+S8v1F6iYmc3nbZLryMdc= github.com/sourcenetwork/immutable v0.3.0/go.mod h1:GD7ceuh/HD7z6cdIwzKK2ctzgZ1qqYFJpsFp+8qYnbI= +github.com/sourcenetwork/raccoondb v0.2.0 h1:lQ/r8IUm1IMaivXWhqndgpisLsI59c6M9jn6ujKYBzk= +github.com/sourcenetwork/raccoondb v0.2.0/go.mod h1:A5ElVAhdf9yDjmpLrA3DLqYib09Fnuzm3sFUbY5r9BE= +github.com/sourcenetwork/sourcehub v0.2.1-0.20240305165631-9b75b1000724 h1:Dr13Lb9bTmycQZbNHAP+7RUVcy9g6jxL5rz74ipVyrs= +github.com/sourcenetwork/sourcehub v0.2.1-0.20240305165631-9b75b1000724/go.mod 
h1:jhWsUtCgIE6vDKg9/uvu1rXAOcVTrALjBXf2kLQGrCk= +github.com/sourcenetwork/zanzi v0.3.0 h1:Y9uyrpsT569QjzAxNOwWDxeWOkcntm+26qDLR7nGuo4= +github.com/sourcenetwork/zanzi v0.3.0/go.mod h1:eLQ94tdz96vfwHIZXL5ZoHbV9YHQeMyFeTc5hFSGDRU= +github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI= github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8= github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY= +github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0= github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= +github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= +github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU= github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= +github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= +github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= github.com/spf13/viper v1.18.2 h1:LUXCnvUvSM6FXAsj6nnfc8Q2tp1dIgUfY9Kc8GsSOiQ= github.com/spf13/viper v1.18.2/go.mod h1:EKmWIqdnk5lOcmR72yw6hS+8OPYcwD0jteitLMVB+yk= +github.com/streadway/amqp v0.0.0-20190404075320-75d898a42a94/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= +github.com/streadway/amqp v0.0.0-20190827072141-edfb9018d271/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= +github.com/streadway/handy v0.0.0-20190108123426-d5acb3125c2a/go.mod h1:qNTQ5P5JnDBl6z3cMAg/SywNDC5ABu5ApDIw6lUbRmI= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= @@ -540,6 +1104,7 @@ github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5 github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= 
@@ -547,9 +1112,15 @@ github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsT github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= -github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 h1:epCh84lMvA70Z7CTTCmYQn2CKbY8j86K7/FAIr141uY= -github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7/go.mod h1:q4W45IWZaF22tdD+VEXcAWRA037jwmWEB5VWYORlTpc= +github.com/syndtr/goleveldb v1.0.1-0.20220721030215-126854af5e6d h1:vfofYNRScrDdvS342BElfbETmL1Aiz3i2t0zfRj16Hs= +github.com/syndtr/goleveldb v1.0.1-0.20220721030215-126854af5e6d/go.mod h1:RRCYJbIwD5jmqPI9XoAFR0OcDxqUctll6zUj/+B4S48= github.com/tarm/serial v0.0.0-20180830185346-98f6abe2eb07/go.mod h1:kDXzergiv9cbyO7IOYJZWg1U88JhDg3PB6klq9Hg2pA= +github.com/tendermint/go-amino v0.16.0 h1:GyhmgQKvqF82e2oZeuMSp9JTN0N09emoSZlb2lyGa2E= +github.com/tendermint/go-amino v0.16.0/go.mod h1:TQU0M1i/ImAo+tYpZi73AU3V/dKeCoMC9Sphe2ZwGME= +github.com/tendermint/tm-db v0.6.7 h1:fE00Cbl0jayAoqlExN6oyQJ7fR/ZtoVOmvPJ//+shu8= +github.com/tendermint/tm-db v0.6.7/go.mod h1:byQDzFkZV1syXr/ReXS808NxA2xvyuuVgXOJ/088L6I= +github.com/teserakt-io/golang-ed25519 v0.0.0-20210104091850-3888c087a4c8 h1:RBkacARv7qY5laaXGlF4wFB/tk5rnthhPb8oIBGoagY= +github.com/teserakt-io/golang-ed25519 v0.0.0-20210104091850-3888c087a4c8/go.mod h1:9PdLyPiZIiW3UopXyRnPYyjUXSpiQNHRLu8fOsR3o8M= github.com/textileio/go-datastore-extensions v1.0.1 h1:qIJGqJaigQ1wD4TdwS/hf73u0HChhXvvUSJuxBEKS+c= github.com/textileio/go-datastore-extensions v1.0.1/go.mod h1:Pzj9FDRkb55910dr/FX8M7WywvnS26gBgEDez1ZBuLE= github.com/textileio/go-ds-badger3 v0.1.0 h1:q0kBuBmAcRUR3ClMSYlyw0224XeuzjjGinU53Qz1uXI= @@ -558,8 +1129,15 @@ github.com/textileio/go-log/v2 v2.1.3-gke-2 h1:YkMA5ua0Cf/X6CkbexInsoJ/HdaHQBlgi github.com/textileio/go-log/v2 v2.1.3-gke-2/go.mod h1:DwACkjFS3kjZZR/4Spx3aPfSsciyslwUe5bxV8CEU2w= github.com/tidwall/btree v1.7.0 h1:L1fkJH/AuEh5zBnnBbmTwQ5Lt+bRJ5A8EWecslvo9iI= github.com/tidwall/btree v1.7.0/go.mod h1:twD9XRA5jj9VUQGELzDO4HPQTNJsoWWfYEL+EUQ2cKY= +github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= +github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= +github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw= +github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= +github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY= github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE= github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= +github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= +github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= github.com/urfave/cli v1.22.2/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= github.com/urfave/cli v1.22.10/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= github.com/valyala/fastjson v1.6.4 h1:uAUNq9Z6ymTgGhcm0UynUAB6tlbakBrz6CQFax3BXVQ= @@ -580,22 +1158,47 @@ github.com/whyrusleeping/go-keyspace v0.0.0-20160322163242-5b898ac5add1 h1:EKhdz github.com/whyrusleeping/go-keyspace v0.0.0-20160322163242-5b898ac5add1/go.mod 
h1:8UvriyWtv5Q5EOgjHaSseUEdkQfvwFv1I/In/O2M9gc= github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM= github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg= +github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= +github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= +github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= +github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= +github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/zondax/hid v0.9.2 h1:WCJFnEDMiqGF64nlZz28E9qLVZ0KSJ7xpc5DLEyma2U= +github.com/zondax/hid v0.9.2/go.mod h1:l5wttcP0jwtdLjqjMMWFVEE7d1zO0jvSPA9OPZxWpEM= +github.com/zondax/ledger-go v0.14.3 h1:wEpJt2CEcBJ428md/5MgSLsXLBos98sBOyxNmCjfUCw= +github.com/zondax/ledger-go v0.14.3/go.mod h1:IKKaoxupuB43g4NxeQmbLXv7T9AlQyie1UpHb342ycI= +go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= +go.etcd.io/bbolt v1.3.8 h1:xs88BrvEv273UsB79e0hcVrlUWmS0a8upikMFhSyAtA= +go.etcd.io/bbolt v1.3.8/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw= +go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mIZCrds/GIG4ncV9HhK5PX7jPg= go.opencensus.io v0.18.0/go.mod h1:vKdFvxhtzZ9onBp9VKHK8z/sRpBMnKAsufL7wlDrCOA= +go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= +go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= -go.opentelemetry.io/otel v1.24.0 h1:0LAOdjNmQeSTzGBzduGe/rU4tZhMwL5rWgtp9Ku5Jfo= -go.opentelemetry.io/otel v1.24.0/go.mod h1:W7b9Ozg4nkF5tWI5zsXkaKKDjdVjpD4oAt9Qi/MArHo= -go.opentelemetry.io/otel/metric v1.24.0 h1:6EhoGWWK28x1fbpA4tYTOWBkPefTDQnb8WSGXlc88kI= -go.opentelemetry.io/otel/metric v1.24.0/go.mod h1:VYhLe1rFfxuTXLgj4CBiyz+9WYBA8pNGJgDcSFRKBco= -go.opentelemetry.io/otel/sdk v1.24.0 h1:YMPPDNymmQN3ZgczicBY3B6sf9n62Dlj9pWD3ucgoDw= -go.opentelemetry.io/otel/sdk v1.24.0/go.mod h1:KVrIYw6tEubO9E96HQpcmpTKDVn9gdv35HoYiQWGDFg= -go.opentelemetry.io/otel/sdk/metric v1.24.0 h1:yyMQrPzF+k88/DbH7o4FMAs80puqd+9osbiBrJrz/w8= -go.opentelemetry.io/otel/sdk/metric v1.24.0/go.mod h1:I6Y5FjH6rvEnTTAYQz3Mmv2kl6Ek5IIrmwTLqMrrOE0= -go.opentelemetry.io/otel/trace v1.24.0 
h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y1YELI= -go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU= +go.opentelemetry.io/otel v1.26.0 h1:LQwgL5s/1W7YiiRwxf03QGnWLb2HW4pLiAhaA5cZXBs= +go.opentelemetry.io/otel v1.26.0/go.mod h1:UmLkJHUAidDval2EICqBMbnAd0/m2vmpf/dAM+fvFs4= +go.opentelemetry.io/otel/metric v1.26.0 h1:7S39CLuY5Jgg9CrnA9HHiEjGMF/X2VHvoXGgSllRz30= +go.opentelemetry.io/otel/metric v1.26.0/go.mod h1:SY+rHOI4cEawI9a7N1A4nIg/nTQXe1ccCNWYOJUrpX4= +go.opentelemetry.io/otel/sdk v1.26.0 h1:Y7bumHf5tAiDlRYFmGqetNcLaVUZmh4iYfmGxtmz7F8= +go.opentelemetry.io/otel/sdk v1.26.0/go.mod h1:0p8MXpqLeJ0pzcszQQN4F0S5FVjBLgypeGSngLsmirs= +go.opentelemetry.io/otel/sdk/metric v1.26.0 h1:cWSks5tfriHPdWFnl+qpX3P681aAYqlZHcAyHw5aU9Y= +go.opentelemetry.io/otel/sdk/metric v1.26.0/go.mod h1:ClMFFknnThJCksebJwz7KIyEDHO+nTB6gK8obLy8RyE= +go.opentelemetry.io/otel/trace v1.26.0 h1:1ieeAUb4y0TE26jUFrCIXKpTuVK7uJGN9/Z/2LP5sQA= +go.opentelemetry.io/otel/trace v1.26.0/go.mod h1:4iDxvGDQuUkHve82hJJ8UqrwswHYsZuWCBllGV2U2y0= +go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= +go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE= @@ -604,74 +1207,117 @@ go.uber.org/dig v1.17.1 h1:Tga8Lz8PcYNsWsyHMZ1Vm0OQOUaJNDyvPImgbAu9YSc= go.uber.org/dig v1.17.1/go.mod h1:Us0rSJiThwCv2GteUN0Q7OKvU7n5J4dxZ9JKUXozFdE= go.uber.org/fx v1.20.1 h1:zVwVQGS8zYvhh9Xxcu4w1M6ESyeMzebzj2NbSayZ4Mk= go.uber.org/fx v1.20.1/go.mod h1:iSYNbHf2y55acNCwCXKx7LbWb5WG1Bnue5RDXz1OREg= +go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= go.uber.org/goleak v1.1.11-0.20210813005559-691160354723/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= go.uber.org/mock v0.4.0 h1:VcM4ZOtdbR4f6VXfiOpwpVJDL6lCReaZ6mw31wqh7KU= go.uber.org/mock v0.4.0/go.mod h1:a6FSlNadKUHUa9IP5Vyt1zh4fC7uAwxMutEAscFbkZc= +go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= +go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= go.uber.org/zap v1.16.0/go.mod h1:MA8QOfq0BHJwdXa996Y4dYkAqRKB8/1K1QMMZVaNZjQ= +go.uber.org/zap v1.18.1/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI= go.uber.org/zap v1.19.1/go.mod h1:j3DNczoxDZroyBnOT1L/Q79cfUMGZxlv/9dzN7SM1rI= go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= go.uber.org/zap 
v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= go4.org v0.0.0-20180809161055-417644f6feb5/go.mod h1:MkTOUMDaeVYJUOUsaDXIhWPZYa1yOyC1qaOBpL57BhE= golang.org/x/build v0.0.0-20190111050920-041ab4dc3f9d/go.mod h1:OWs+y06UdEOHN4y+MfF/py+xQ/tYqIWW03b70/CG9Rw= +golang.org/x/crypto v0.0.0-20170930174604-9419663f5a44/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181030102418-4d3f4d9ffa16/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190313024323-a1f597ede03a/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200115085410-6d4e4cb37c7d/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200602180216-279210d13fed/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200728195943-123391ffb6de/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= -golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA= +golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20240103183307-be819d1f06fc h1:ao2WRsKSzW6KuUY9IWPwWahcHCgR0s52IfwutMfEbdM= -golang.org/x/exp v0.0.0-20240103183307-be819d1f06fc/go.mod h1:iRJReGqOEeBhDZGkGbynYwcHlctCvnjTYIamk7uXpHI= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20200331195152-e8c3332aa8e5/go.mod h1:4M0jN8W1tt0AVLNr8HDosyJCDCDuyL9N9+3m7wDWgKw= +golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 h1:LfspQV/FYTatPTr/3HzIcmiUFH7PGP+OQ6mgDYo3yuQ= +golang.org/x/exp v0.0.0-20240222234643-814bf88cf225/go.mod h1:CxmFvTBINI24O/j8iY7H1xHzx2i4OsyguNBmN/uPtqc= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= 
golang.org/x/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.14.0 h1:dGoOF9QVLYng8IHTm7BAyWqCqSheQ5pYWGhzW00YJr0= -golang.org/x/mod v0.14.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.15.0 h1:SernR4v+D55NyBH2QiEQrlBAnj1ECL6AGrA5+dPaMY8= +golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181029044818-c44066c5c816/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181106065722-10aee1819953/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190313220215-9f648a60d977/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= 
+golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200421231249-e086a090c8fd/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200813134508-3edf25e44fcc/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4= -golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= +golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= +golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220607020251-c690dde0001d/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.23.0 h1:7EYJ93RZ9vYSZAIb2x3lnuvqO5zneoD6IvWjuhfxjTs= +golang.org/x/net v0.23.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181017192945-9dcd33a902f4/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181203162652-d668ce993890/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/perf v0.0.0-20180704124530-6e6d33e29852/go.mod h1:JLpeXjPJfIyPr5TlbXLkXWLhP8nz10XfvxElABhCtcw= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -681,45 +1327,101 @@ golang.org/x/sync 
v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180810173357-98c5dad5d1a0/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181029174526-d69651ed3497/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190130150945-aca44879d564/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190316082340-a2f829d7f35f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys 
v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191220142924-d4481acd189f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200124204421-9fbb57f87de9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200420163511-1957bb5e6d1f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200602225109-6fdc65e7d980/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200814200057-3d37ad5750ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220315194320-039c03cc5b86/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys 
v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20221010170243-090e33056c14/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o= +golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= +golang.org/x/term v0.19.0 h1:+ThwsDv+tYfnJFhF4L8jITxu1tdTWRTZpdsWgEgjL6Q= +golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= +golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= +golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181030000716-a0a13e073c7b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -727,28 +1429,36 @@ golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGm golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod 
h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.16.1 h1:TLyB3WofjdOEepBHAU20JdNC1Zbg87elYofWYAY5oZA= -golang.org/x/tools v0.16.1/go.mod h1:kYVVN6I1mBNoB1OX+noeBjbRk4IUEPa7JJ+TJMEooJ0= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.18.0 h1:k8NLag8AGHnn+PHbl7g43CtqZAwG60vZkLqgyZgIHgQ= +golang.org/x/tools v0.18.0/go.mod h1:GL7B4CwcLLeo59yx/9UWWuNOW1n3VZ4f5axWfML7Lcg= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= gonum.org/v1/gonum v0.14.0 h1:2NiG67LD1tEH0D7kM+ps2V+fXmsAnpUeec7n8tcr4S0= gonum.org/v1/gonum v0.14.0/go.mod h1:AoWeoz0becf9QMWtE8iWXNXc27fK4fNeHNf/oMejGfU= google.golang.org/api v0.0.0-20180910000450-7ca32eb868bf/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= google.golang.org/api v0.0.0-20181030000543-1d582fd0359e/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= google.golang.org/api v0.1.0/go.mod h1:UGEZY7KEX120AnNLIHFMKIo4obdJhkp2tPbaPlQx13Y= +google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk= google.golang.org/appengine v1.1.0/go.mod 
h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -758,20 +1468,43 @@ google.golang.org/genproto v0.0.0-20180831171423-11092d34479b/go.mod h1:JiN7NxoA google.golang.org/genproto v0.0.0-20181029155118-b69ba1387ce2/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20181202183823-bd91e49a0898/go.mod h1:7Ep/1NZk928CDR8SjdVbjWNpdIf6nzjE3BTgJDr2Atg= google.golang.org/genproto v0.0.0-20190306203927-b5d61aea6440/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190530194941-fb225487d101/go.mod h1:z3L6/3dTEVtUr6QSP8miRzeRqwQOioJ9I66odjN4I7s= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240123012728-ef4313101c80 h1:AjyfHzEPEFp/NpvfN5g+KDla3EMojjhRVZc1i7cj+oM= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240123012728-ef4313101c80/go.mod h1:PAREbraiVEVGVdTZsVWjSbbTtSyGbAgIIvni8a8CD5s= +google.golang.org/genproto v0.0.0-20210126160654-44e461bb6506/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20220314164441-57ef72a4c106/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E= +google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de h1:F6qOa9AZTYJXOUEr4jDysRDLrm4PHePlge4v4TGAlxY= +google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de/go.mod h1:VUhTRKeHn9wwcdrk73nvdC9gF178Tzhmt/qyaFcPLSo= +google.golang.org/genproto/googleapis/api v0.0.0-20240227224415-6ceb2ff114de h1:jFNzHPIeuzhdRwVhbZdiym9q0ory/xY3sA+v2wPg8I0= +google.golang.org/genproto/googleapis/api v0.0.0-20240227224415-6ceb2ff114de/go.mod h1:5iCWqnniDlqZHrd3neWVTOwvh/v6s3232omMecelax8= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240227224415-6ceb2ff114de h1:cZGRis4/ot9uVm639a+rHCUaG0JJHEsdyzSQTMX+suY= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240227224415-6ceb2ff114de/go.mod h1:H4O17MA/PE9BsGx3w+a+W2VOLLD1Qf7oJneAoU6WktY= google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.16.0/go.mod h1:0JHn/cJsOMiMfNA9+DeHDlAU7KAAB5GDlYFpa9MZMio= google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.0/go.mod h1:chYK+tFQF0nDUGJgXMSgLCQk3phJEuONr2DCgLDdAQM= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.22.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc 
v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.32.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= -google.golang.org/grpc v1.62.0 h1:HQKZ/fa1bXkX1oFOvSjmZEUL8wLSaZTjCcLAlmZRtdk= -google.golang.org/grpc v1.62.0/go.mod h1:IWTG0VlJLCh1SkC58F7np9ka9mx/WNkjl4PGJaiq+QE= +google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= +google.golang.org/grpc v1.49.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= +google.golang.org/grpc v1.63.2 h1:MUeiw1B2maTVZthpU5xvASfTh3LDbxHd6IJ6QQVU+xM= +google.golang.org/grpc v1.63.2/go.mod h1:WAX/8DgncnokcFUldAxq7GeB5DXHDbMF+lLvDomNkRA= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -780,25 +1513,39 @@ google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzi google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= -google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= +google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 
v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= +gopkg.in/gcfg.v1 v1.2.3/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= +gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= @@ -808,6 +1555,8 @@ gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gotest.tools/v3 v3.5.1 h1:EENdUnS3pdur5nybKYIh2Vfgc8IUNBjxDPSjtiJcOzU= +gotest.tools/v3 v3.5.1/go.mod h1:isy3WKz7GK6uNw/sbHzfKBLvlvXwUyV06n6brMxxopU= grpc.go4.org v0.0.0-20170609214715-11d0a25b4919/go.mod h1:77eQGdRu53HpSqPFJFmuJdjuHRquDANNeA4x7B8WQ9o= honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= @@ -816,5 +1565,14 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= lukechampine.com/blake3 v1.2.1 h1:YuqqRuaqsGV71BV/nm9xlI0MKUv4QC54jQnBChWbGnI= lukechampine.com/blake3 v1.2.1/go.mod h1:0OFRp7fBtAylGVCO40o87sbupkyIGgbpv1+M1k1LM6k= +nhooyr.io/websocket v1.8.6/go.mod h1:B70DZP8IakI65RVQ51MsWP/8jndNma26DVA/nFSCgW0= +nhooyr.io/websocket v1.8.7 h1:usjR2uOr/zjjkVMy0lW+PPohFok7PCow5sDjLgX4P4g= +nhooyr.io/websocket v1.8.7/go.mod h1:B70DZP8IakI65RVQ51MsWP/8jndNma26DVA/nFSCgW0= +pgregory.net/rapid v1.1.0 h1:CMa0sjHSru3puNx+J0MIAuiiEV4N0qj8/cMWGBBCsjw= +pgregory.net/rapid v1.1.0/go.mod h1:PY5XlDGj0+V1FCq0o192FdRhpKHGTRIWBgqjDBTrq04= +sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= +sigs.k8s.io/yaml v1.4.0 
h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E= +sigs.k8s.io/yaml v1.4.0/go.mod h1:Ejl7/uTz7PSA4eKMyQCUTnhZYNmLIl+5c2lQPGR2BPY= +sourcegraph.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU= sourcegraph.com/sourcegraph/go-diff v0.5.0/go.mod h1:kuch7UrkMzY0X+p9CRK03kfuPQ2zzQcaEFbx8wA8rck= sourcegraph.com/sqs/pbtypes v0.0.0-20180604144634-d3ebe8f20ae4/go.mod h1:ketZ/q3QxT9HOBeFhu6RdvsftgpsbFHBF5Cas6cDKZ0= diff --git a/http/client.go b/http/client.go index 142a359c5b..4eaadfd2d0 100644 --- a/http/client.go +++ b/http/client.go @@ -22,9 +22,10 @@ import ( blockstore "github.com/ipfs/boxo/blockstore" "github.com/lens-vm/lens/host-go/config/model" - "github.com/sourcenetwork/immutable" sse "github.com/vito/go-sse/sse" + "github.com/sourcenetwork/immutable" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/events" @@ -85,11 +86,6 @@ func (c *Client) NewConcurrentTxn(ctx context.Context, readOnly bool) (datastore return &Transaction{txRes.ID, c.http}, nil } -func (c *Client) WithTxn(tx datastore.Txn) client.Store { - client := c.http.withTxn(tx.ID()) - return &Client{client} -} - func (c *Client) BasicImport(ctx context.Context, filepath string) error { methodURL := c.http.baseURL.JoinPath("backup", "import") @@ -161,6 +157,25 @@ func (c *Client) PatchSchema( return err } +func (c *Client) PatchCollection( + ctx context.Context, + patch string, +) error { + methodURL := c.http.baseURL.JoinPath("collections") + + body, err := json.Marshal(patch) + if err != nil { + return err + } + + req, err := http.NewRequestWithContext(ctx, http.MethodPatch, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return err + } + _, err = c.http.request(req) + return err +} + func (c *Client) SetActiveSchemaVersion(ctx context.Context, schemaVersionID string) error { methodURL := c.http.baseURL.JoinPath("schema", "default") @@ -322,7 +337,10 @@ func (c *Client) GetAllIndexes(ctx context.Context) (map[client.CollectionName][ return indexes, nil } -func (c *Client) ExecRequest(ctx context.Context, query string) *client.RequestResult { +func (c *Client) ExecRequest( + ctx context.Context, + query string, +) *client.RequestResult { methodURL := c.http.baseURL.JoinPath("graphql") result := &client.RequestResult{} diff --git a/http/client_acp.go b/http/client_acp.go new file mode 100644 index 0000000000..a0140cf437 --- /dev/null +++ b/http/client_acp.go @@ -0,0 +1,44 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package http + +import ( + "context" + "net/http" + "strings" + + "github.com/sourcenetwork/defradb/client" +) + +func (c *Client) AddPolicy( + ctx context.Context, + policy string, +) (client.AddPolicyResult, error) { + methodURL := c.http.baseURL.JoinPath("acp", "policy") + + req, err := http.NewRequestWithContext( + ctx, + http.MethodPost, + methodURL.String(), + strings.NewReader(policy), + ) + + if err != nil { + return client.AddPolicyResult{}, err + } + + var policyResult client.AddPolicyResult + if err := c.http.requestJson(req, &policyResult); err != nil { + return client.AddPolicyResult{}, err + } + + return policyResult, nil +} diff --git a/http/client_collection.go b/http/client_collection.go index 876c175338..68b76c6a9e 100644 --- a/http/client_collection.go +++ b/http/client_collection.go @@ -24,8 +24,6 @@ import ( sse "github.com/vito/go-sse/sse" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/client/request" - "github.com/sourcenetwork/defradb/datastore" ) var _ client.Collection = (*Collection)(nil) @@ -60,7 +58,10 @@ func (c *Collection) Definition() client.CollectionDefinition { return c.def } -func (c *Collection) Create(ctx context.Context, doc *client.Document) error { +func (c *Collection) Create( + ctx context.Context, + doc *client.Document, +) error { if !c.Description().Name.HasValue() { return client.ErrOperationNotPermittedOnNamelessCols } @@ -71,10 +72,12 @@ func (c *Collection) Create(ctx context.Context, doc *client.Document) error { if err != nil { return err } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), strings.NewReader(body)) if err != nil { return err } + _, err = c.http.request(req) if err != nil { return err @@ -83,7 +86,10 @@ func (c *Collection) Create(ctx context.Context, doc *client.Document) error { return nil } -func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) error { +func (c *Collection) CreateMany( + ctx context.Context, + docs []*client.Document, +) error { if !c.Description().Name.HasValue() { return client.ErrOperationNotPermittedOnNamelessCols } @@ -97,25 +103,32 @@ func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) er } docMapList = append(docMapList, docMap) } + body, err := json.Marshal(docMapList) if err != nil { return err } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) if err != nil { return err } + _, err = c.http.request(req) if err != nil { return err } + for _, doc := range docs { doc.Clean() } return nil } -func (c *Collection) Update(ctx context.Context, doc *client.Document) error { +func (c *Collection) Update( + ctx context.Context, + doc *client.Document, +) error { if !c.Description().Name.HasValue() { return client.ErrOperationNotPermittedOnNamelessCols } @@ -130,6 +143,7 @@ func (c *Collection) Update(ctx context.Context, doc *client.Document) error { if err != nil { return err } + _, err = c.http.request(req) if err != nil { return err @@ -138,18 +152,24 @@ func (c *Collection) Update(ctx context.Context, doc *client.Document) error { return nil } -func (c *Collection) Save(ctx context.Context, doc *client.Document) error { +func (c *Collection) Save( + ctx context.Context, + doc *client.Document, +) error { _, err := c.Get(ctx, doc.ID(), true) if err == nil { return c.Update(ctx, doc) } - if errors.Is(err, client.ErrDocumentNotFound) { + if errors.Is(err, client.ErrDocumentNotFoundOrNotAuthorized) { return c.Create(ctx, doc) } return 
err } -func (c *Collection) Delete(ctx context.Context, docID client.DocID) (bool, error) { +func (c *Collection) Delete( + ctx context.Context, + docID client.DocID, +) (bool, error) { if !c.Description().Name.HasValue() { return false, client.ErrOperationNotPermittedOnNamelessCols } @@ -160,6 +180,7 @@ func (c *Collection) Delete(ctx context.Context, docID client.DocID) (bool, erro if err != nil { return false, err } + _, err = c.http.request(req) if err != nil { return false, err @@ -167,7 +188,10 @@ func (c *Collection) Delete(ctx context.Context, docID client.DocID) (bool, erro return true, nil } -func (c *Collection) Exists(ctx context.Context, docID client.DocID) (bool, error) { +func (c *Collection) Exists( + ctx context.Context, + docID client.DocID, +) (bool, error) { _, err := c.Get(ctx, docID, false) if err != nil { return false, err @@ -175,22 +199,10 @@ func (c *Collection) Exists(ctx context.Context, docID client.DocID) (bool, erro return true, nil } -func (c *Collection) UpdateWith(ctx context.Context, target any, updater string) (*client.UpdateResult, error) { - switch t := target.(type) { - case string, map[string]any, *request.Filter: - return c.UpdateWithFilter(ctx, t, updater) - case client.DocID: - return c.UpdateWithDocID(ctx, t, updater) - case []client.DocID: - return c.UpdateWithDocIDs(ctx, t, updater) - default: - return nil, client.ErrInvalidUpdateTarget - } -} - -func (c *Collection) updateWith( +func (c *Collection) UpdateWithFilter( ctx context.Context, - request CollectionUpdateRequest, + filter any, + updater string, ) (*client.UpdateResult, error) { if !c.Description().Name.HasValue() { return nil, client.ErrOperationNotPermittedOnNamelessCols @@ -198,6 +210,11 @@ func (c *Collection) updateWith( methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name.Value()) + request := CollectionUpdateRequest{ + Filter: filter, + Updater: updater, + } + body, err := json.Marshal(request) if err != nil { return nil, err @@ -206,6 +223,7 @@ func (c *Collection) updateWith( if err != nil { return nil, err } + var result client.UpdateResult if err := c.http.requestJson(req, &result); err != nil { return nil, err @@ -213,59 +231,9 @@ func (c *Collection) updateWith( return &result, nil } -func (c *Collection) UpdateWithFilter( +func (c *Collection) DeleteWithFilter( ctx context.Context, filter any, - updater string, -) (*client.UpdateResult, error) { - return c.updateWith(ctx, CollectionUpdateRequest{ - Filter: filter, - Updater: updater, - }) -} - -func (c *Collection) UpdateWithDocID( - ctx context.Context, - docID client.DocID, - updater string, -) (*client.UpdateResult, error) { - return c.updateWith(ctx, CollectionUpdateRequest{ - DocID: docID.String(), - Updater: updater, - }) -} - -func (c *Collection) UpdateWithDocIDs( - ctx context.Context, - docIDs []client.DocID, - updater string, -) (*client.UpdateResult, error) { - var strDocIDs []string - for _, docID := range docIDs { - strDocIDs = append(strDocIDs, docID.String()) - } - return c.updateWith(ctx, CollectionUpdateRequest{ - DocIDs: strDocIDs, - Updater: updater, - }) -} - -func (c *Collection) DeleteWith(ctx context.Context, target any) (*client.DeleteResult, error) { - switch t := target.(type) { - case string, map[string]any, *request.Filter: - return c.DeleteWithFilter(ctx, t) - case client.DocID: - return c.DeleteWithDocID(ctx, t) - case []client.DocID: - return c.DeleteWithDocIDs(ctx, t) - default: - return nil, client.ErrInvalidDeleteTarget - } -} - -func (c *Collection) deleteWith( - ctx 
context.Context, - request CollectionDeleteRequest, ) (*client.DeleteResult, error) { if !c.Description().Name.HasValue() { return nil, client.ErrOperationNotPermittedOnNamelessCols @@ -273,14 +241,20 @@ func (c *Collection) deleteWith( methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name.Value()) + request := CollectionDeleteRequest{ + Filter: filter, + } + body, err := json.Marshal(request) if err != nil { return nil, err } + req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), bytes.NewBuffer(body)) if err != nil { return nil, err } + var result client.DeleteResult if err := c.http.requestJson(req, &result); err != nil { return nil, err @@ -288,29 +262,11 @@ func (c *Collection) deleteWith( return &result, nil } -func (c *Collection) DeleteWithFilter(ctx context.Context, filter any) (*client.DeleteResult, error) { - return c.deleteWith(ctx, CollectionDeleteRequest{ - Filter: filter, - }) -} - -func (c *Collection) DeleteWithDocID(ctx context.Context, docID client.DocID) (*client.DeleteResult, error) { - return c.deleteWith(ctx, CollectionDeleteRequest{ - DocID: docID.String(), - }) -} - -func (c *Collection) DeleteWithDocIDs(ctx context.Context, docIDs []client.DocID) (*client.DeleteResult, error) { - var strDocIDs []string - for _, docID := range docIDs { - strDocIDs = append(strDocIDs, docID.String()) - } - return c.deleteWith(ctx, CollectionDeleteRequest{ - DocIDs: strDocIDs, - }) -} - -func (c *Collection) Get(ctx context.Context, docID client.DocID, showDeleted bool) (*client.Document, error) { +func (c *Collection) Get( + ctx context.Context, + docID client.DocID, + showDeleted bool, +) (*client.Document, error) { if !c.Description().Name.HasValue() { return nil, client.ErrOperationNotPermittedOnNamelessCols } @@ -327,11 +283,12 @@ func (c *Collection) Get(ctx context.Context, docID client.DocID, showDeleted bo if err != nil { return nil, err } + data, err := c.http.request(req) if err != nil { return nil, err } - doc := client.NewDocWithID(docID, c.def.Schema) + doc := client.NewDocWithID(docID, c.def) err = doc.SetWithJSON(data) if err != nil { return nil, err @@ -340,14 +297,9 @@ func (c *Collection) Get(ctx context.Context, docID client.DocID, showDeleted bo return doc, nil } -func (c *Collection) WithTxn(tx datastore.Txn) client.Collection { - return &Collection{ - http: c.http.withTxn(tx.ID()), - def: c.def, - } -} - -func (c *Collection) GetAllDocIDs(ctx context.Context) (<-chan client.DocIDResult, error) { +func (c *Collection) GetAllDocIDs( + ctx context.Context, +) (<-chan client.DocIDResult, error) { if !c.Description().Name.HasValue() { return nil, client.ErrOperationNotPermittedOnNamelessCols } @@ -358,6 +310,7 @@ func (c *Collection) GetAllDocIDs(ctx context.Context) (<-chan client.DocIDResul if err != nil { return nil, err } + c.http.setDefaultHeaders(req) res, err := c.http.client.Do(req) diff --git a/http/client_lens.go b/http/client_lens.go index 9021aa31d6..34945a41d6 100644 --- a/http/client_lens.go +++ b/http/client_lens.go @@ -21,7 +21,6 @@ import ( "github.com/sourcenetwork/immutable/enumerable" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/datastore" ) var _ client.LensRegistry = (*LensRegistry)(nil) @@ -31,11 +30,6 @@ type LensRegistry struct { http *httpClient } -func (c *LensRegistry) WithTxn(tx datastore.Txn) client.LensRegistry { - http := c.http.withTxn(tx.ID()) - return &LensRegistry{http} -} - type setMigrationRequest struct { CollectionID uint32 Config model.Lens diff 
--git a/http/errors.go b/http/errors.go index 1510c2e520..ef25d06421 100644 --- a/http/errors.go +++ b/http/errors.go @@ -19,6 +19,7 @@ import ( const ( errFailedToLoadKeys string = "failed to load given keys" errMethodIsNotImplemented string = "the method is not implemented" + errFailedToGetContext string = "failed to get context" ) // Errors returnable from this package. @@ -54,6 +55,13 @@ func (e *errorResponse) UnmarshalJSON(data []byte) error { return nil } +func NewErrFailedToGetContext(contextType string) error { + return errors.New( + errFailedToGetContext, + errors.NewKV("ContextType", contextType), + ) +} + func NewErrFailedToLoadKeys(inner error, publicKeyPath, privateKeyPath string) error { return errors.Wrap( errFailedToLoadKeys, diff --git a/http/handler.go b/http/handler.go index b06ef06cb6..80afcc5a3e 100644 --- a/http/handler.go +++ b/http/handler.go @@ -31,6 +31,7 @@ var playgroundHandler http.Handler = http.HandlerFunc(http.NotFound) func NewApiRouter() (*Router, error) { tx_handler := &txHandler{} store_handler := &storeHandler{} + acp_handler := &acpHandler{} collection_handler := &collectionHandler{} p2p_handler := &p2pHandler{} lens_handler := &lensHandler{} @@ -43,6 +44,7 @@ func NewApiRouter() (*Router, error) { tx_handler.bindRoutes(router) store_handler.bindRoutes(router) + acp_handler.bindRoutes(router) p2p_handler.bindRoutes(router) ccip_handler.bindRoutes(router) @@ -52,7 +54,6 @@ func NewApiRouter() (*Router, error) { }) router.AddRouteGroup(func(r *Router) { - r.AddMiddleware(LensMiddleware) lens_handler.bindRoutes(r) }) @@ -80,7 +81,7 @@ func NewHandler(db client.DB) (*Handler, error) { r.Use( ApiMiddleware(db, txs), TransactionMiddleware, - StoreMiddleware, + IdentityMiddleware, ) r.Handle("/*", router) }) diff --git a/http/handler_acp.go b/http/handler_acp.go new file mode 100644 index 0000000000..c3c5985c71 --- /dev/null +++ b/http/handler_acp.go @@ -0,0 +1,73 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package http + +import ( + "io" + "net/http" + + "github.com/getkin/kin-openapi/openapi3" + + "github.com/sourcenetwork/defradb/client" +) + +type acpHandler struct{} + +func (s *acpHandler) AddPolicy(rw http.ResponseWriter, req *http.Request) { + db, ok := req.Context().Value(dbContextKey).(client.DB) + if !ok { + responseJSON(rw, http.StatusBadRequest, errorResponse{NewErrFailedToGetContext("db")}) + return + } + + policyBytes, err := io.ReadAll(req.Body) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + + addPolicyResult, err := db.AddPolicy( + req.Context(), + string(policyBytes), + ) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + + responseJSON(rw, http.StatusOK, addPolicyResult) +} + +func (h *acpHandler) bindRoutes(router *Router) { + successResponse := &openapi3.ResponseRef{ + Ref: "#/components/responses/success", + } + errorResponse := &openapi3.ResponseRef{ + Ref: "#/components/responses/error", + } + + acpAddPolicyRequest := openapi3.NewRequestBody(). + WithRequired(true). 
+ WithContent(openapi3.NewContentWithSchema(openapi3.NewStringSchema(), []string{"text/plain"})) + + acpAddPolicy := openapi3.NewOperation() + acpAddPolicy.OperationID = "add policy" + acpAddPolicy.Description = "Add a policy using acp system" + acpAddPolicy.Tags = []string{"acp_policy"} + acpAddPolicy.Responses = openapi3.NewResponses() + acpAddPolicy.Responses.Set("200", successResponse) + acpAddPolicy.Responses.Set("400", errorResponse) + acpAddPolicy.RequestBody = &openapi3.RequestBodyRef{ + Value: acpAddPolicyRequest, + } + + router.AddRoute("/acp/policy", http.MethodPost, acpAddPolicy, h.AddPolicy) +} diff --git a/http/handler_ccip.go b/http/handler_ccip.go index c0eb6a5918..01597377e2 100644 --- a/http/handler_ccip.go +++ b/http/handler_ccip.go @@ -35,7 +35,7 @@ type CCIPResponse struct { // ExecCCIP handles GraphQL over Cross Chain Interoperability Protocol requests. func (c *ccipHandler) ExecCCIP(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) var ccipReq CCIPRequest switch req.Method { diff --git a/http/handler_ccip_test.go b/http/handler_ccip_test.go index 2a2cc4f077..b89517b975 100644 --- a/http/handler_ccip_test.go +++ b/http/handler_ccip_test.go @@ -203,7 +203,7 @@ func setupDatabase(t *testing.T) client.DB { col, err := cdb.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "bob"}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "bob"}`), col.Definition()) require.NoError(t, err) err = col.Create(ctx, doc) diff --git a/http/handler_collection.go b/http/handler_collection.go index d713afdf40..60c18b3442 100644 --- a/http/handler_collection.go +++ b/http/handler_collection.go @@ -26,16 +26,12 @@ import ( type collectionHandler struct{} type CollectionDeleteRequest struct { - DocID string `json:"docID"` - DocIDs []string `json:"docIDs"` - Filter any `json:"filter"` + Filter any `json:"filter"` } type CollectionUpdateRequest struct { - DocID string `json:"docID"` - DocIDs []string `json:"docIDs"` - Filter any `json:"filter"` - Updater string `json:"updater"` + Filter any `json:"filter"` + Updater string `json:"updater"` } func (s *collectionHandler) Create(rw http.ResponseWriter, req *http.Request) { @@ -49,7 +45,7 @@ func (s *collectionHandler) Create(rw http.ResponseWriter, req *http.Request) { switch { case client.IsJSONArray(data): - docList, err := client.NewDocsFromJSON(data, col.Schema()) + docList, err := client.NewDocsFromJSON(data, col.Definition()) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -61,7 +57,7 @@ func (s *collectionHandler) Create(rw http.ResponseWriter, req *http.Request) { } rw.WriteHeader(http.StatusOK) default: - doc, err := client.NewDocFromJSON(data, col.Schema()) + doc, err := client.NewDocFromJSON(data, col.Definition()) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -74,7 +70,7 @@ func (s *collectionHandler) Create(rw http.ResponseWriter, req *http.Request) { } } -func (s *collectionHandler) DeleteWith(rw http.ResponseWriter, req *http.Request) { +func (s *collectionHandler) DeleteWithFilter(rw http.ResponseWriter, req *http.Request) { col := req.Context().Value(colContextKey).(client.Collection) var request CollectionDeleteRequest @@ -83,48 +79,15 @@ func (s *collectionHandler) DeleteWith(rw http.ResponseWriter, req *http.Request return } - switch { - case request.Filter 
!= nil: - result, err := col.DeleteWith(req.Context(), request.Filter) - if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err}) - return - } - responseJSON(rw, http.StatusOK, result) - case request.DocID != "": - docID, err := client.NewDocIDFromString(request.DocID) - if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err}) - return - } - result, err := col.DeleteWith(req.Context(), docID) - if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err}) - return - } - responseJSON(rw, http.StatusOK, result) - case request.DocIDs != nil: - var docIDs []client.DocID - for _, docIDStr := range request.DocIDs { - docID, err := client.NewDocIDFromString(docIDStr) - if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err}) - return - } - docIDs = append(docIDs, docID) - } - result, err := col.DeleteWith(req.Context(), docIDs) - if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err}) - return - } - responseJSON(rw, http.StatusOK, result) - default: - responseJSON(rw, http.StatusBadRequest, errorResponse{ErrInvalidRequestBody}) + result, err := col.DeleteWithFilter(req.Context(), request.Filter) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return } + responseJSON(rw, http.StatusOK, result) } -func (s *collectionHandler) UpdateWith(rw http.ResponseWriter, req *http.Request) { +func (s *collectionHandler) UpdateWithFilter(rw http.ResponseWriter, req *http.Request) { col := req.Context().Value(colContextKey).(client.Collection) var request CollectionUpdateRequest @@ -133,45 +96,12 @@ func (s *collectionHandler) UpdateWith(rw http.ResponseWriter, req *http.Request return } - switch { - case request.Filter != nil: - result, err := col.UpdateWith(req.Context(), request.Filter, request.Updater) - if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err}) - return - } - responseJSON(rw, http.StatusOK, result) - case request.DocID != "": - docID, err := client.NewDocIDFromString(request.DocID) - if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err}) - return - } - result, err := col.UpdateWith(req.Context(), docID, request.Updater) - if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err}) - return - } - responseJSON(rw, http.StatusOK, result) - case request.DocIDs != nil: - var docIDs []client.DocID - for _, docIDStr := range request.DocIDs { - docID, err := client.NewDocIDFromString(docIDStr) - if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err}) - return - } - docIDs = append(docIDs, docID) - } - result, err := col.UpdateWith(req.Context(), docIDs, request.Updater) - if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err}) - return - } - responseJSON(rw, http.StatusOK, result) - default: - responseJSON(rw, http.StatusBadRequest, errorResponse{ErrInvalidRequestBody}) + result, err := col.UpdateWithFilter(req.Context(), request.Filter, request.Updater) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return } + responseJSON(rw, http.StatusOK, result) } func (s *collectionHandler) Update(rw http.ResponseWriter, req *http.Request) { @@ -182,11 +112,18 @@ func (s *collectionHandler) Update(rw http.ResponseWriter, req *http.Request) { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } + doc, err := col.Get(req.Context(), docID, true) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return 
} + + if doc == nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{client.ErrDocumentNotFoundOrNotAuthorized}) + return + } + patch, err := io.ReadAll(req.Body) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) @@ -212,6 +149,7 @@ func (s *collectionHandler) Delete(rw http.ResponseWriter, req *http.Request) { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } + _, err = col.Delete(req.Context(), docID) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) @@ -229,11 +167,18 @@ func (s *collectionHandler) Get(rw http.ResponseWriter, req *http.Request) { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } + doc, err := col.Get(req.Context(), docID, showDeleted) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } + + if doc == nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{client.ErrDocumentNotFoundOrNotAuthorized}) + return + } + docMap, err := doc.ToMap() if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) @@ -302,7 +247,7 @@ func (s *collectionHandler) CreateIndex(rw http.ResponseWriter, req *http.Reques } func (s *collectionHandler) GetIndexes(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) indexesMap, err := store.GetAllIndexes(req.Context()) if err != nil { @@ -392,7 +337,7 @@ func (h *collectionHandler) bindRoutes(router *Router) { WithJSONSchemaRef(updateResultSchema) collectionUpdateWith := openapi3.NewOperation() - collectionUpdateWith.OperationID = "collection_update_with" + collectionUpdateWith.OperationID = "collection_update_with_filter" collectionUpdateWith.Description = "Update document(s) in a collection" collectionUpdateWith.Tags = []string{"collection"} collectionUpdateWith.AddParameter(collectionNamePathParam) @@ -411,7 +356,7 @@ func (h *collectionHandler) bindRoutes(router *Router) { WithJSONSchemaRef(deleteResultSchema) collectionDeleteWith := openapi3.NewOperation() - collectionDeleteWith.OperationID = "collections_delete_with" + collectionDeleteWith.OperationID = "collection_delete_with_filter" collectionDeleteWith.Description = "Delete document(s) from a collection" collectionDeleteWith.Tags = []string{"collection"} collectionDeleteWith.AddParameter(collectionNamePathParam) @@ -516,8 +461,8 @@ func (h *collectionHandler) bindRoutes(router *Router) { router.AddRoute("/collections/{name}", http.MethodGet, collectionKeys, h.GetAllDocIDs) router.AddRoute("/collections/{name}", http.MethodPost, collectionCreate, h.Create) - router.AddRoute("/collections/{name}", http.MethodPatch, collectionUpdateWith, h.UpdateWith) - router.AddRoute("/collections/{name}", http.MethodDelete, collectionDeleteWith, h.DeleteWith) + router.AddRoute("/collections/{name}", http.MethodPatch, collectionUpdateWith, h.UpdateWithFilter) + router.AddRoute("/collections/{name}", http.MethodDelete, collectionDeleteWith, h.DeleteWithFilter) router.AddRoute("/collections/{name}/indexes", http.MethodPost, createIndex, h.CreateIndex) router.AddRoute("/collections/{name}/indexes", http.MethodGet, getIndexes, h.GetIndexes) router.AddRoute("/collections/{name}/indexes/{index}", http.MethodDelete, dropIndex, h.DropIndex) diff --git a/http/handler_lens.go b/http/handler_lens.go index 532eaacefc..94ef9c2abe 100644 --- a/http/handler_lens.go +++ b/http/handler_lens.go @@ -22,9 +22,9 @@ import ( type lensHandler struct{} func (s 
*lensHandler) ReloadLenses(rw http.ResponseWriter, req *http.Request) { - lens := req.Context().Value(lensContextKey).(client.LensRegistry) + store := req.Context().Value(dbContextKey).(client.Store) - err := lens.ReloadLenses(req.Context()) + err := store.LensRegistry().ReloadLenses(req.Context()) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -33,7 +33,7 @@ func (s *lensHandler) ReloadLenses(rw http.ResponseWriter, req *http.Request) { } func (s *lensHandler) SetMigration(rw http.ResponseWriter, req *http.Request) { - lens := req.Context().Value(lensContextKey).(client.LensRegistry) + store := req.Context().Value(dbContextKey).(client.Store) var request setMigrationRequest if err := requestJSON(req, &request); err != nil { @@ -41,7 +41,7 @@ func (s *lensHandler) SetMigration(rw http.ResponseWriter, req *http.Request) { return } - err := lens.SetMigration(req.Context(), request.CollectionID, request.Config) + err := store.LensRegistry().SetMigration(req.Context(), request.CollectionID, request.Config) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -50,7 +50,7 @@ func (s *lensHandler) SetMigration(rw http.ResponseWriter, req *http.Request) { } func (s *lensHandler) MigrateUp(rw http.ResponseWriter, req *http.Request) { - lens := req.Context().Value(lensContextKey).(client.LensRegistry) + store := req.Context().Value(dbContextKey).(client.Store) var request migrateRequest if err := requestJSON(req, &request); err != nil { @@ -58,7 +58,7 @@ func (s *lensHandler) MigrateUp(rw http.ResponseWriter, req *http.Request) { return } - result, err := lens.MigrateUp(req.Context(), enumerable.New(request.Data), request.CollectionID) + result, err := store.LensRegistry().MigrateUp(req.Context(), enumerable.New(request.Data), request.CollectionID) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -75,7 +75,7 @@ func (s *lensHandler) MigrateUp(rw http.ResponseWriter, req *http.Request) { } func (s *lensHandler) MigrateDown(rw http.ResponseWriter, req *http.Request) { - lens := req.Context().Value(lensContextKey).(client.LensRegistry) + store := req.Context().Value(dbContextKey).(client.Store) var request migrateRequest if err := requestJSON(req, &request); err != nil { @@ -83,7 +83,7 @@ func (s *lensHandler) MigrateDown(rw http.ResponseWriter, req *http.Request) { return } - result, err := lens.MigrateDown(req.Context(), enumerable.New(request.Data), request.CollectionID) + result, err := store.LensRegistry().MigrateDown(req.Context(), enumerable.New(request.Data), request.CollectionID) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return diff --git a/http/handler_store.go b/http/handler_store.go index af82f0bc44..521aa13775 100644 --- a/http/handler_store.go +++ b/http/handler_store.go @@ -27,7 +27,7 @@ import ( type storeHandler struct{} func (s *storeHandler) BasicImport(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) var config client.BackupConfig if err := requestJSON(req, &config); err != nil { @@ -43,7 +43,7 @@ func (s *storeHandler) BasicImport(rw http.ResponseWriter, req *http.Request) { } func (s *storeHandler) BasicExport(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) var config client.BackupConfig if err := 
requestJSON(req, &config); err != nil { @@ -59,7 +59,7 @@ func (s *storeHandler) BasicExport(rw http.ResponseWriter, req *http.Request) { } func (s *storeHandler) AddSchema(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) schema, err := io.ReadAll(req.Body) if err != nil { @@ -75,7 +75,7 @@ func (s *storeHandler) AddSchema(rw http.ResponseWriter, req *http.Request) { } func (s *storeHandler) PatchSchema(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) var message patchSchemaRequest err := requestJSON(req, &message) @@ -92,8 +92,26 @@ func (s *storeHandler) PatchSchema(rw http.ResponseWriter, req *http.Request) { rw.WriteHeader(http.StatusOK) } +func (s *storeHandler) PatchCollection(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(dbContextKey).(client.Store) + + var patch string + err := requestJSON(req, &patch) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + + err = store.PatchCollection(req.Context(), patch) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + rw.WriteHeader(http.StatusOK) +} + func (s *storeHandler) SetActiveSchemaVersion(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) schemaVersionID, err := io.ReadAll(req.Body) if err != nil { @@ -109,7 +127,7 @@ func (s *storeHandler) SetActiveSchemaVersion(rw http.ResponseWriter, req *http. } func (s *storeHandler) AddView(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) var message addViewRequest err := requestJSON(req, &message) @@ -128,7 +146,7 @@ func (s *storeHandler) AddView(rw http.ResponseWriter, req *http.Request) { } func (s *storeHandler) SetMigration(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) var cfg client.LensConfig if err := requestJSON(req, &cfg); err != nil { @@ -145,7 +163,7 @@ func (s *storeHandler) SetMigration(rw http.ResponseWriter, req *http.Request) { } func (s *storeHandler) GetCollection(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) options := client.CollectionFetchOptions{} if req.URL.Query().Has("name") { @@ -181,7 +199,7 @@ func (s *storeHandler) GetCollection(rw http.ResponseWriter, req *http.Request) } func (s *storeHandler) GetSchema(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) options := client.SchemaFetchOptions{} if req.URL.Query().Has("version_id") { @@ -203,7 +221,7 @@ func (s *storeHandler) GetSchema(rw http.ResponseWriter, req *http.Request) { } func (s *storeHandler) GetAllIndexes(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) indexes, err := store.GetAllIndexes(req.Context()) if err != nil { @@ -278,7 +296,7 @@ func (res 
*GraphQLResponse) UnmarshalJSON(data []byte) error { } func (s *storeHandler) ExecRequest(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) var request GraphQLRequest switch { @@ -293,6 +311,7 @@ func (s *storeHandler) ExecRequest(rw http.ResponseWriter, req *http.Request) { responseJSON(rw, http.StatusBadRequest, errorResponse{ErrMissingRequest}) return } + result := store.ExecRequest(req.Context(), request.Query) if result.Pub == nil { @@ -476,6 +495,17 @@ func (h *storeHandler) bindRoutes(router *Router) { collectionDescribe.AddResponse(200, collectionsResponse) collectionDescribe.Responses.Set("400", errorResponse) + patchCollection := openapi3.NewOperation() + patchCollection.OperationID = "patch_collection" + patchCollection.Description = "Update collection definitions" + patchCollection.Tags = []string{"collection"} + patchCollection.RequestBody = &openapi3.RequestBodyRef{ + Value: openapi3.NewRequestBody().WithJSONSchema(openapi3.NewStringSchema()), + } + patchCollection.Responses = openapi3.NewResponses() + patchCollection.Responses.Set("200", successResponse) + patchCollection.Responses.Set("400", errorResponse) + collectionDefintionsSchema := openapi3.NewArraySchema() collectionDefintionsSchema.Items = collectionDefinitionSchema @@ -590,7 +620,7 @@ func (h *storeHandler) bindRoutes(router *Router) { router.AddRoute("/backup/export", http.MethodPost, backupExport, h.BasicExport) router.AddRoute("/backup/import", http.MethodPost, backupImport, h.BasicImport) router.AddRoute("/collections", http.MethodGet, collectionDescribe, h.GetCollection) - router.AddRoute("/view", http.MethodPost, views, h.AddView) + router.AddRoute("/collections", http.MethodPatch, patchCollection, h.PatchCollection) router.AddRoute("/view", http.MethodPost, views, h.AddView) router.AddRoute("/graphql", http.MethodGet, graphQLGet, h.ExecRequest) router.AddRoute("/graphql", http.MethodPost, graphQLPost, h.ExecRequest) diff --git a/http/http_client.go b/http/http_client.go index 13abb3c6d0..f8e63fe70a 100644 --- a/http/http_client.go +++ b/http/http_client.go @@ -17,12 +17,13 @@ import ( "net/http" "net/url" "strings" + + "github.com/sourcenetwork/defradb/db" ) type httpClient struct { client *http.Client baseURL *url.URL - txValue string } func newHttpClient(rawURL string) (*httpClient, error) { @@ -40,20 +41,17 @@ func newHttpClient(rawURL string) (*httpClient, error) { return &client, nil } -func (c *httpClient) withTxn(value uint64) *httpClient { - return &httpClient{ - client: c.client, - baseURL: c.baseURL, - txValue: fmt.Sprintf("%d", value), - } -} - func (c *httpClient) setDefaultHeaders(req *http.Request) { req.Header.Set("Accept", "application/json") req.Header.Set("Content-Type", "application/json") - if c.txValue != "" { - req.Header.Set(TX_HEADER_NAME, c.txValue) + txn, ok := db.TryGetContextTxn(req.Context()) + if ok { + req.Header.Set(txHeaderName, fmt.Sprintf("%d", txn.ID())) + } + id := db.GetContextIdentity(req.Context()) + if id.HasValue() { + req.Header.Add(authHeaderName, authSchemaPrefix+id.Value().String()) } } diff --git a/http/logger.go b/http/logger.go index d23f65e94a..c4e715f695 100644 --- a/http/logger.go +++ b/http/logger.go @@ -15,11 +15,10 @@ import ( "time" "github.com/go-chi/chi/v5/middleware" - - "github.com/sourcenetwork/defradb/logging" + "github.com/sourcenetwork/corelog" ) -var log = logging.MustNewLogger("http") +var log = corelog.NewLogger("http") type 
logEntry struct { req *http.Request @@ -28,14 +27,14 @@ type logEntry struct { var _ middleware.LogEntry = (*logEntry)(nil) func (e *logEntry) Write(status, bytes int, header http.Header, elapsed time.Duration, extra any) { - log.Info( + log.InfoContext( e.req.Context(), "Request", - logging.NewKV("Method", e.req.Method), - logging.NewKV("Path", e.req.URL.Path), - logging.NewKV("Status", status), - logging.NewKV("LengthBytes", bytes), - logging.NewKV("ElapsedTime", elapsed.String()), + corelog.String("Method", e.req.Method), + corelog.String("Path", e.req.URL.Path), + corelog.Int("Status", status), + corelog.Int("LengthBytes", bytes), + corelog.Duration("ElapsedTime", elapsed), ) } diff --git a/http/middleware.go b/http/middleware.go index f18ba8bf60..4655868373 100644 --- a/http/middleware.go +++ b/http/middleware.go @@ -21,11 +21,23 @@ import ( "github.com/go-chi/cors" "golang.org/x/exp/slices" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" + "github.com/sourcenetwork/defradb/db" ) -const TX_HEADER_NAME = "x-defradb-tx" +const ( + // txHeaderName is the name of the transaction header. + // This header should contain a valid transaction id. + txHeaderName = "x-defradb-tx" + // authHeaderName is the name of the authorization header. + // This header should contain an ACP identity. + authHeaderName = "Authorization" + // Using Basic right now, but this will soon change to 'Bearer' as acp authentication + // gets implemented: https://github.com/sourcenetwork/defradb/issues/2017 + authSchemaPrefix = "Basic " +) type contextKey string @@ -34,20 +46,6 @@ var ( txsContextKey = contextKey("txs") // dbContextKey is the context key for the client.DB dbContextKey = contextKey("db") - // txContextKey is the context key for the datastore.Txn - // - // This will only be set if a transaction id is specified. - txContextKey = contextKey("tx") - // storeContextKey is the context key for the client.Store - // - // If a transaction exists, all operations will be executed - // in the current transaction context. - storeContextKey = contextKey("store") - // lensContextKey is the context key for the client.LensRegistry - // - // If a transaction exists, all operations will be executed - // in the current transaction context. - lensContextKey = contextKey("lens") // colContextKey is the context key for the client.Collection // // If a transaction exists, all operations will be executed @@ -87,7 +85,7 @@ func TransactionMiddleware(next http.Handler) http.Handler { return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { txs := req.Context().Value(txsContextKey).(*sync.Map) - txValue := req.Header.Get(TX_HEADER_NAME) + txValue := req.Header.Get(txHeaderName) if txValue == "" { next.ServeHTTP(rw, req) return @@ -102,62 +100,46 @@ func TransactionMiddleware(next http.Handler) http.Handler { next.ServeHTTP(rw, req) return } - - ctx := context.WithValue(req.Context(), txContextKey, tx) - next.ServeHTTP(rw, req.WithContext(ctx)) - }) -} - -// StoreMiddleware sets the db context for the current request. 
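
The logging swap above replaces the bespoke `logging.KV` pairs with corelog's typed field constructors. A minimal, self-contained sketch of the same call shape, using only APIs that appear in this diff (`corelog.NewLogger`, `InfoContext`, and the `String`/`Int`/`Duration` fields); the request URL is an assumed local default:

```go
package main

import (
	"context"
	"net/http"
	"time"

	"github.com/sourcenetwork/corelog"
)

var log = corelog.NewLogger("http")

// logRequest mirrors logEntry.Write above: one structured Info line per request.
func logRequest(req *http.Request, status, bytes int, elapsed time.Duration) {
	log.InfoContext(
		req.Context(),
		"Request",
		corelog.String("Method", req.Method),
		corelog.String("Path", req.URL.Path),
		corelog.Int("Status", status),
		corelog.Int("LengthBytes", bytes),
		corelog.Duration("ElapsedTime", elapsed),
	)
}

func main() {
	req, _ := http.NewRequestWithContext(
		context.Background(), http.MethodGet, "http://localhost:9181/api/v0/graphql", nil,
	)
	logRequest(req, http.StatusOK, 512, 42*time.Millisecond)
}
```
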
-func StoreMiddleware(next http.Handler) http.Handler { - return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { - db := req.Context().Value(dbContextKey).(client.DB) - - var store client.Store - if tx, ok := req.Context().Value(txContextKey).(datastore.Txn); ok { - store = db.WithTxn(tx) - } else { - store = db + ctx := req.Context() + if val, ok := tx.(datastore.Txn); ok { + ctx = db.SetContextTxn(ctx, val) } - - ctx := context.WithValue(req.Context(), storeContextKey, store) next.ServeHTTP(rw, req.WithContext(ctx)) }) } -// LensMiddleware sets the lens context for the current request. -func LensMiddleware(next http.Handler) http.Handler { +// CollectionMiddleware sets the collection context for the current request. +func CollectionMiddleware(next http.Handler) http.Handler { return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + db := req.Context().Value(dbContextKey).(client.DB) - var lens client.LensRegistry - if tx, ok := req.Context().Value(txContextKey).(datastore.Txn); ok { - lens = store.LensRegistry().WithTxn(tx) - } else { - lens = store.LensRegistry() + col, err := db.GetCollectionByName(req.Context(), chi.URLParam(req, "name")) + if err != nil { + rw.WriteHeader(http.StatusNotFound) + return } - ctx := context.WithValue(req.Context(), lensContextKey, lens) + ctx := context.WithValue(req.Context(), colContextKey, col) next.ServeHTTP(rw, req.WithContext(ctx)) }) } -// CollectionMiddleware sets the collection context for the current request. -func CollectionMiddleware(next http.Handler) http.Handler { +func IdentityMiddleware(next http.Handler) http.Handler { return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) - - col, err := store.GetCollectionByName(req.Context(), chi.URLParam(req, "name")) - if err != nil { - rw.WriteHeader(http.StatusNotFound) + authHeader := req.Header.Get(authHeaderName) + if authHeader == "" { + next.ServeHTTP(rw, req) return } - if tx, ok := req.Context().Value(txContextKey).(datastore.Txn); ok { - col = col.WithTxn(tx) + identity := strings.TrimPrefix(authHeader, authSchemaPrefix) + // If expected schema prefix was not found, or empty, then assume no identity. 
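
Client-side, `setDefaultHeaders` above is what populates these two headers, and `IdentityMiddleware` is their server-side mirror. A sketch of a raw request carrying both; the header names and `Basic ` prefix are copied from the constants in this diff, while the address and the identity token value are placeholders:

```go
package main

import (
	"fmt"
	"net/http"
)

// Constants copied from middleware.go above.
const (
	txHeaderName     = "x-defradb-tx"
	authHeaderName   = "Authorization"
	authSchemaPrefix = "Basic "
)

func main() {
	req, err := http.NewRequest(http.MethodPost, "http://localhost:9181/api/v0/graphql", nil)
	if err != nil {
		panic(err)
	}
	// An explicit transaction id, as TransactionMiddleware expects.
	req.Header.Set(txHeaderName, fmt.Sprintf("%d", uint64(1)))
	// An ACP identity with the (temporary) Basic scheme prefix; the token
	// value here is invented for illustration only.
	req.Header.Add(authHeaderName, authSchemaPrefix+"example-identity-token")
	fmt.Println(req.Header)
}
```
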
+ if identity == authHeader || identity == "" { + next.ServeHTTP(rw, req) + return } - ctx := context.WithValue(req.Context(), colContextKey, col) + ctx := db.SetContextIdentity(req.Context(), acpIdentity.New(identity)) next.ServeHTTP(rw, req.WithContext(ctx)) }) } diff --git a/http/openapi.go b/http/openapi.go index 12a832c704..698a88796e 100644 --- a/http/openapi.go +++ b/http/openapi.go @@ -134,6 +134,10 @@ func NewOpenAPISpec() (*openapi3.T, error) { Name: "p2p", Description: "Peer-to-peer network operations", }, + &openapi3.Tag{ + Name: "acp", + Description: "Access control policy operations", + }, &openapi3.Tag{ Name: "transaction", Description: "Database transaction operations", diff --git a/http/utils.go b/http/utils.go index c7b1507c4e..a67afef476 100644 --- a/http/utils.go +++ b/http/utils.go @@ -36,8 +36,8 @@ func responseJSON(rw http.ResponseWriter, status int, out any) { func parseError(msg any) error { switch msg { - case client.ErrDocumentNotFound.Error(): - return client.ErrDocumentNotFound + case client.ErrDocumentNotFoundOrNotAuthorized.Error(): + return client.ErrDocumentNotFoundOrNotAuthorized case badger.ErrTxnConflict.Error(): return badger.ErrTxnConflict default: diff --git a/lens/fetcher.go b/lens/fetcher.go index 1e093f3966..5477b948b5 100644 --- a/lens/fetcher.go +++ b/lens/fetcher.go @@ -16,6 +16,10 @@ import ( "github.com/fxamacker/cbor/v2" + "github.com/sourcenetwork/immutable" + + "github.com/sourcenetwork/defradb/acp" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" "github.com/sourcenetwork/defradb/core" @@ -35,6 +39,7 @@ type lensedFetcher struct { txn datastore.Txn col client.Collection + // Cache the fieldDescriptions mapped by name to allow for cheaper access within the fetcher loop fieldDescriptionsByName map[string]client.FieldDefinition @@ -57,7 +62,9 @@ func NewFetcher(source fetcher.Fetcher, registry client.LensRegistry) fetcher.Fe func (f *lensedFetcher) Init( ctx context.Context, + identity immutable.Option[acpIdentity.Identity], txn datastore.Txn, + acp immutable.Option[acp.ACP], col client.Collection, fields []client.FieldDefinition, filter *mapper.Filter, @@ -76,7 +83,7 @@ func (f *lensedFetcher) Init( f.fieldDescriptionsByName[defFields[i].Name] = defFields[i] } - history, err := getTargetedSchemaHistory(ctx, txn, f.col.Schema().Root, f.col.Schema().VersionID) + history, err := getTargetedCollectionHistory(ctx, txn, f.col.Schema().Root, f.col.Schema().VersionID) if err != nil { return err } @@ -105,7 +112,18 @@ historyLoop: } else { innerFetcherFields = fields } - return f.source.Init(ctx, txn, col, innerFetcherFields, filter, docmapper, reverse, showDeleted) + return f.source.Init( + ctx, + identity, + txn, + acp, + col, + innerFetcherFields, + filter, + docmapper, + reverse, + showDeleted, + ) } func (f *lensedFetcher) Start(ctx context.Context, spans core.Spans) error { diff --git a/lens/history.go b/lens/history.go index a7a5ee57d8..eb793bff8c 100644 --- a/lens/history.go +++ b/lens/history.go @@ -20,187 +20,161 @@ import ( "github.com/sourcenetwork/defradb/db/description" ) -// schemaHistoryLink represents an item in a particular schema's history, it +// collectionHistoryLink represents an item in a particular collection's schema history, it // links to the previous and next version items if they exist. -type schemaHistoryLink struct { +type collectionHistoryLink struct { // The collection as this point in history. 
collection *client.CollectionDescription - // The history link to the next schema versions, if there are some + // The history link to the next collection versions, if there are some // (for the most recent schema version this will be empty). - next []*schemaHistoryLink + next []*collectionHistoryLink - // The history link to the previous schema versions, if there are - // some (for the initial schema version this will be empty). - previous []*schemaHistoryLink + // The history link to the previous collection versions, if there are + // some (for the initial collection version this will be empty). + previous []*collectionHistoryLink } -// targetedSchemaHistoryLink represents an item in a particular schema's history, it -// links to the previous and next version items if they exist. -// -// It also contains a vector which describes the distance and direction to the -// target schema version (given as an input param on construction). -type targetedSchemaHistoryLink struct { +// targetedCollectionHistoryLink represents an item in a particular collection's schema history, it +// links to the previous and next version items if they exist and are on the path to +// the target schema version. +type targetedCollectionHistoryLink struct { // The collection as this point in history. collection *client.CollectionDescription - // The link to next schema version, if there is one - // (for the most recent schema version this will be None). - next immutable.Option[*targetedSchemaHistoryLink] - - // The link to the previous schema version, if there is - // one (for the initial schema version this will be None). - previous immutable.Option[*targetedSchemaHistoryLink] + // The link to next collection version, if there is one + // (for the most recent collection version this will be None). + next immutable.Option[*targetedCollectionHistoryLink] - // The distance and direction from this history item to the target. - // - // A zero value indicates that this is the target item. A positive value - // indicates that the target is more recent. A negative value indicates - // that the target predates this history item. - targetVector int + // The link to the previous collection version, if there is + // one (for the initial collection version this will be None). + previous immutable.Option[*targetedCollectionHistoryLink] } -// getTargetedSchemaHistory returns the history of the schema of the given id, relative +// getTargetedCollectionHistory returns the history of the schema of the given id, relative // to the given target schema version id. // -// This includes any history items that are only known via registered -// schema migrations. -func getTargetedSchemaHistory( +// This includes any history items that are only known via registered schema migrations. 
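
The map built below is doubly linked toward the target: versions older than the target expose a `next` link, newer ones a `previous` link, and the target itself has neither. A simplified, standalone sketch of that traversal, with plain pointers standing in for the `immutable.Option` links and invented version ids:

```go
package main

import "fmt"

// link is a simplified stand-in for targetedCollectionHistoryLink: each
// node knows at most one neighbour on the path toward the target version.
type link struct {
	versionID string
	next      *link // set on versions older than the target
	previous  *link // set on versions newer than the target
}

// walkToTarget follows next (or previous) pointers until neither is set,
// which by construction is the target schema version.
func walkToTarget(history map[string]*link, from string) (string, bool) {
	l, ok := history[from]
	if !ok {
		return "", false // unknown version: no migration path
	}
	for l.next != nil {
		l = l.next
	}
	for l.previous != nil {
		l = l.previous
	}
	return l.versionID, true
}

func main() {
	// v1 -> v2 -> v3 (target) <- v4, all ids invented.
	v3 := &link{versionID: "v3"}
	v2 := &link{versionID: "v2", next: v3}
	v1 := &link{versionID: "v1", next: v2}
	v4 := &link{versionID: "v4", previous: v3}
	history := map[string]*link{"v1": v1, "v2": v2, "v3": v3, "v4": v4}

	target, _ := walkToTarget(history, "v1")
	fmt.Println(target) // v3
}
```
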
+func getTargetedCollectionHistory( ctx context.Context, txn datastore.Txn, schemaRoot string, targetSchemaVersionID string, -) (map[schemaVersionID]*targetedSchemaHistoryLink, error) { - history, err := getSchemaHistory(ctx, txn, schemaRoot) +) (map[schemaVersionID]*targetedCollectionHistoryLink, error) { + history, err := getCollectionHistory(ctx, txn, schemaRoot) if err != nil { return nil, err } - result := map[schemaVersionID]*targetedSchemaHistoryLink{} - - for _, item := range history { - result[item.collection.SchemaVersionID] = &targetedSchemaHistoryLink{ - collection: item.collection, - } + targetHistoryItem, ok := history[targetSchemaVersionID] + if !ok { + // If the target schema version is unknown then there are no possible migrations + // that we can do. + return nil, nil } - for _, item := range result { - schemaHistoryLink := history[item.collection.ID] - nextHistoryItems := schemaHistoryLink.next - if len(nextHistoryItems) == 0 { - continue - } + result := map[schemaVersionID]*targetedCollectionHistoryLink{} - // WARNING: This line assumes that each collection can only have a single source, and so - // just takes the first item. If/when collections can have multiple sources we will need to change - // this slightly. - nextItem := result[nextHistoryItems[0].collection.SchemaVersionID] - item.next = immutable.Some(nextItem) - nextItem.previous = immutable.Some(item) + targetLink := &targetedCollectionHistoryLink{ + collection: targetHistoryItem.collection, } + result[targetLink.collection.SchemaVersionID] = targetLink - orphanSchemaVersions := map[string]struct{}{} + linkForwards(targetLink, targetHistoryItem, result) + linkBackwards(targetLink, targetHistoryItem, result) - for schemaVersion, item := range result { - if item.collection.SchemaVersionID == targetSchemaVersionID { - continue - } - if item.targetVector != 0 { + return result, nil +} + +// linkForwards traverses and links the history forwards from the given starting point. +// +// Forward collection versions found will in turn be linked both forwards and backwards, allowing +// branches to be correctly mapped to the target schema version. +func linkForwards( + currentLink *targetedCollectionHistoryLink, + currentHistoryItem *collectionHistoryLink, + result map[schemaVersionID]*targetedCollectionHistoryLink, +) { + for _, nextHistoryItem := range currentHistoryItem.next { + if _, ok := result[nextHistoryItem.collection.SchemaVersionID]; ok { + // As the history forms a DAG, this should only ever happen when + // iterating through the item we were at immediately before the current. continue } - distanceTravelled := 0 - currentItem := item - wasFound := false - for { - if !currentItem.next.HasValue() { - break - } - - currentItem = currentItem.next.Value() - distanceTravelled++ - if currentItem.targetVector != 0 { - distanceTravelled += currentItem.targetVector - wasFound = true - break - } - if currentItem.collection.SchemaVersionID == targetSchemaVersionID { - wasFound = true - break - } + nextLink := &targetedCollectionHistoryLink{ + collection: nextHistoryItem.collection, + previous: immutable.Some(currentLink), } + result[nextLink.collection.SchemaVersionID] = nextLink - if !wasFound { - // The target was not found going up the chain, try looking back. - // This is important for downgrading schema versions. 
- for { - if !currentItem.previous.HasValue() { - break - } - - currentItem = currentItem.previous.Value() - distanceTravelled-- - if currentItem.targetVector != 0 { - distanceTravelled += currentItem.targetVector - wasFound = true - break - } - if currentItem.collection.SchemaVersionID == targetSchemaVersionID { - wasFound = true - break - } - } - } + linkForwards(nextLink, nextHistoryItem, result) + linkBackwards(nextLink, nextHistoryItem, result) + } +} - if !wasFound { - // This may happen if users define schema migrations to unknown schema versions - // with no migration path to known schema versions, esentially creating orphan - // migrations. These may become linked later and should remain persisted in the - // database, but we can drop them from the history here/now. - orphanSchemaVersions[schemaVersion] = struct{}{} +// linkBackwards traverses and links the history backwards from the given starting point. +// +// Backward collection versions found will in turn be linked both forwards and backwards, allowing +// branches to be correctly mapped to the target schema version. +func linkBackwards( + currentLink *targetedCollectionHistoryLink, + currentHistoryItem *collectionHistoryLink, + result map[schemaVersionID]*targetedCollectionHistoryLink, +) { + for _, prevHistoryItem := range currentHistoryItem.previous { + if _, ok := result[prevHistoryItem.collection.SchemaVersionID]; ok { + // As the history forms a DAG, this should only ever happen when + // iterating through the item we were at immediately before the current. continue } - item.targetVector = distanceTravelled - } + prevLink := &targetedCollectionHistoryLink{ + collection: prevHistoryItem.collection, + next: immutable.Some(currentLink), + } + result[prevLink.collection.SchemaVersionID] = prevLink - for schemaVersion := range orphanSchemaVersions { - delete(result, schemaVersion) + linkForwards(prevLink, prevHistoryItem, result) + linkBackwards(prevLink, prevHistoryItem, result) } - - return result, nil } -// getSchemaHistory returns the history of the schema of the given id as linked list +// getCollectionHistory returns the history of the collection of the given root id as linked list // with each item mapped by schema version id. // -// This includes any history items that are only known via registered -// schema migrations. -func getSchemaHistory( +// This includes any history items that are only known via registered schema migrations. 
+func getCollectionHistory( ctx context.Context, txn datastore.Txn, schemaRoot string, -) (map[collectionID]*schemaHistoryLink, error) { +) (map[schemaVersionID]*collectionHistoryLink, error) { cols, err := description.GetCollectionsBySchemaRoot(ctx, txn, schemaRoot) if err != nil { return nil, err } - history := map[collectionID]*schemaHistoryLink{} + history := map[schemaVersionID]*collectionHistoryLink{} + schemaVersionsByColID := map[uint32]schemaVersionID{} for _, c := range cols { + // Todo - this `col := c` can be removed with Go 1.22: + // https://github.com/sourcenetwork/defradb/issues/2431 col := c + // Convert the temporary types to the cleaner return type: - history[col.ID] = &schemaHistoryLink{ + history[col.SchemaVersionID] = &collectionHistoryLink{ collection: &col, } + schemaVersionsByColID[col.ID] = col.SchemaVersionID } for _, historyItem := range history { for _, source := range historyItem.collection.CollectionSources() { - src := history[source.SourceCollectionID] + srcSchemaVersion := schemaVersionsByColID[source.SourceCollectionID] + src := history[srcSchemaVersion] historyItem.previous = append( - historyItem.next, + historyItem.previous, src, ) diff --git a/lens/lens.go b/lens/lens.go index 4e700d7324..1a42bdf972 100644 --- a/lens/lens.go +++ b/lens/lens.go @@ -19,7 +19,6 @@ import ( ) type schemaVersionID = string -type collectionID = uint32 // LensDoc represents a document that will be sent to/from a Lens. type LensDoc = map[string]any @@ -57,7 +56,7 @@ type lens struct { outputPipe enumerable.Concatenation[LensDoc] unknownVersionPipe enumerable.Queue[LensDoc] - schemaVersionHistory map[schemaVersionID]*targetedSchemaHistoryLink + collectionHistory map[schemaVersionID]*targetedCollectionHistoryLink source enumerable.Queue[lensInput] } @@ -68,18 +67,18 @@ func new( ctx context.Context, lensRegistry client.LensRegistry, targetSchemaVersionID schemaVersionID, - schemaVersionHistory map[schemaVersionID]*targetedSchemaHistoryLink, + collectionHistory map[schemaVersionID]*targetedCollectionHistoryLink, ) Lens { targetSource := enumerable.NewQueue[LensDoc]() outputPipe := enumerable.Concat[LensDoc](targetSource) return &lens{ - lensRegistry: lensRegistry, - ctx: ctx, - source: enumerable.NewQueue[lensInput](), - outputPipe: outputPipe, - unknownVersionPipe: targetSource, - schemaVersionHistory: schemaVersionHistory, + lensRegistry: lensRegistry, + ctx: ctx, + source: enumerable.NewQueue[lensInput](), + outputPipe: outputPipe, + unknownVersionPipe: targetSource, + collectionHistory: collectionHistory, lensInputPipesBySchemaVersionIDs: map[schemaVersionID]enumerable.Queue[LensDoc]{ targetSchemaVersionID: targetSource, }, @@ -137,7 +136,7 @@ func (l *lens) Next() (bool, error) { // up to the output via any intermediary pipes. inputPipe = p } else { - historyLocation, ok := l.schemaVersionHistory[doc.SchemaVersionID] + historyLocation, ok := l.collectionHistory[doc.SchemaVersionID] if !ok { // We may recieve documents of unknown schema versions, they should // still be fed through the pipe system in order to preserve order. @@ -178,7 +177,7 @@ func (l *lens) Next() (bool, error) { break } - if historyLocation.targetVector > 0 { + if historyLocation.next.HasValue() { // Aquire a lens migration from the registery, using the junctionPipe as its source. // The new pipeHead will then be connected as a source to the next migration-stage on // the next loop. 
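
With `targetVector` gone, `Next` here decides the migration direction purely from which neighbour link exists on the current history location. A compact standalone restatement of that branch logic, as a sketch rather than the actual registry calls:

```go
package main

import "fmt"

// direction mirrors the branch in lens.Next after this change: a next link
// means the target is more recent (migrate up), a previous link means the
// target predates this version (migrate down), and neither means we have
// already reached the target version.
func direction(hasNext, hasPrevious bool) string {
	switch {
	case hasNext:
		return "up"
	case hasPrevious:
		return "down"
	default:
		return "at target"
	}
}

func main() {
	fmt.Println(direction(true, false))  // up
	fmt.Println(direction(false, true))  // down
	fmt.Println(direction(false, false)) // at target
}
```
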
@@ -188,7 +187,7 @@ func (l *lens) Next() (bool, error) { } historyLocation = historyLocation.next.Value() - } else { + } else if historyLocation.previous.HasValue() { // Aquire a lens migration from the registery, using the junctionPipe as its source. // The new pipeHead will then be connected as a source to the next migration-stage on // the next loop. diff --git a/lens/registry.go b/lens/registry.go index ba24779611..ede3b99bb2 100644 --- a/lens/registry.go +++ b/lens/registry.go @@ -84,24 +84,30 @@ const DefaultPoolSize int = 5 // NewRegistry instantiates a new registery. // // It will be of size 5 (per schema version) if a size is not provided. -func NewRegistry(lensPoolSize immutable.Option[int], db TxnSource) client.LensRegistry { - var size int - if lensPoolSize.HasValue() { - size = lensPoolSize.Value() - } else { - size = DefaultPoolSize +func NewRegistry( + db TxnSource, + poolSize immutable.Option[int], + runtime immutable.Option[module.Runtime], +) client.LensRegistry { + registry := &lensRegistry{ + poolSize: DefaultPoolSize, + runtime: wasmtime.New(), + modulesByPath: map[string]module.Module{}, + lensPoolsByCollectionID: map[uint32]*lensPool{}, + reversedPoolsByCollectionID: map[uint32]*lensPool{}, + txnCtxs: map[uint64]*txnContext{}, + } + + if poolSize.HasValue() { + registry.poolSize = poolSize.Value() + } + if runtime.HasValue() { + registry.runtime = runtime.Value() } return &implicitTxnLensRegistry{ - db: db, - registry: &lensRegistry{ - poolSize: size, - runtime: wasmtime.New(), - modulesByPath: map[string]module.Module{}, - lensPoolsByCollectionID: map[uint32]*lensPool{}, - reversedPoolsByCollectionID: map[uint32]*lensPool{}, - txnCtxs: map[uint64]*txnContext{}, - }, + db: db, + registry: registry, } } diff --git a/licenses/BSL.txt b/licenses/BSL.txt index 38cf309ebc..64d1d657d6 100644 --- a/licenses/BSL.txt +++ b/licenses/BSL.txt @@ -7,7 +7,7 @@ Parameters Licensor: Democratized Data (D2) Foundation -Licensed Work: DefraDB v0.10.0 +Licensed Work: DefraDB v0.11.0 The Licensed Work is (c) 2023 D2 Foundation. @@ -28,7 +28,7 @@ Additional Use Grant: You may only use the Licensed Work for the -Change Date: 2028-03-08 +Change Date: 2028-05-03 Change License: Apache License, Version 2.0 diff --git a/logging/config.go b/logging/config.go deleted file mode 100644 index 63cde2ceb5..0000000000 --- a/logging/config.go +++ /dev/null @@ -1,264 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package logging - -import ( - "context" - "io" - "os" -) - -type ( - // EncoderFormat is the format of the log output (JSON, CSV, ...). - EncoderFormat = int8 - EncoderFormatOption struct { - EncoderFormat EncoderFormat - HasValue bool - } -) - -// NewEncoderFormatOption creates a new EncoderFormatOption with the given value. 
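
`NewRegistry` now resolves each optional argument against a default rather than branching up front. A standalone sketch of that pattern using `sourcenetwork/immutable`, the only package assumed here and one already used throughout this diff; `newPoolSize` is an illustrative helper, not part of the registry:

```go
package main

import (
	"fmt"

	"github.com/sourcenetwork/immutable"
)

const defaultPoolSize = 5

// newPoolSize mirrors how NewRegistry resolves its optional arguments:
// start from the default, then overwrite only if the caller supplied a value.
func newPoolSize(opt immutable.Option[int]) int {
	size := defaultPoolSize
	if opt.HasValue() {
		size = opt.Value()
	}
	return size
}

func main() {
	fmt.Println(newPoolSize(immutable.None[int]())) // 5
	fmt.Println(newPoolSize(immutable.Some(10)))    // 10
}
```
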
-func NewEncoderFormatOption(v EncoderFormat) EncoderFormatOption { - return EncoderFormatOption{ - EncoderFormat: v, - HasValue: true, - } -} - -const ( - stderr = "stderr" - stdout = "stdout" - - JSON EncoderFormat = iota - CSV -) - -type ( - LogLevel = int8 - LogLevelOption struct { - LogLevel LogLevel - HasValue bool - } -) - -func NewLogLevelOption(v LogLevel) LogLevelOption { - return LogLevelOption{ - LogLevel: v, - HasValue: true, - } -} - -// Log levels. -const ( - Debug LogLevel = -1 - Info LogLevel = 0 - Warn LogLevel = 1 - Error LogLevel = 2 - Fatal LogLevel = 5 -) - -type EnableStackTraceOption struct { - EnableStackTrace bool - HasValue bool -} - -type EnableCallerOption struct { - EnableCaller bool - HasValue bool -} - -type DisableColorOption struct { - DisableColor bool - HasValue bool -} - -func NewEnableStackTraceOption(enable bool) EnableStackTraceOption { - return EnableStackTraceOption{ - EnableStackTrace: enable, - HasValue: true, - } -} - -func NewEnableCallerOption(enable bool) EnableCallerOption { - return EnableCallerOption{ - EnableCaller: enable, - HasValue: true, - } -} - -func NewDisableColorOption(disable bool) DisableColorOption { - return DisableColorOption{ - DisableColor: disable, - HasValue: true, - } -} - -type Config struct { - Level LogLevelOption - EncoderFormat EncoderFormatOption - EnableStackTrace EnableStackTraceOption - EnableCaller EnableCallerOption - DisableColor DisableColorOption - OutputPaths []string - OverridesByLoggerName map[string]Config - - Pipe io.Writer // this is used for testing purposes only -} - -func (c Config) forLogger(name string) Config { - loggerConfig := Config{ - Level: c.Level, - EnableStackTrace: c.EnableStackTrace, - DisableColor: c.DisableColor, - EnableCaller: c.EnableCaller, - EncoderFormat: c.EncoderFormat, - OutputPaths: c.OutputPaths, - Pipe: c.Pipe, - } - - if override, hasOverride := c.OverridesByLoggerName[name]; hasOverride { - if override.Level.HasValue { - loggerConfig.Level = override.Level - } - if override.EnableStackTrace.HasValue { - loggerConfig.EnableStackTrace = override.EnableStackTrace - } - if override.EnableCaller.HasValue { - loggerConfig.EnableCaller = override.EnableCaller - } - if override.DisableColor.HasValue { - loggerConfig.DisableColor = override.DisableColor - } - if override.EncoderFormat.HasValue { - loggerConfig.EncoderFormat = override.EncoderFormat - } - if len(override.OutputPaths) != 0 { - loggerConfig.OutputPaths = override.OutputPaths - } - if override.Pipe != nil { - loggerConfig.Pipe = override.Pipe - } - } - - return loggerConfig -} - -func (c Config) copy() Config { - overridesByLoggerName := make(map[string]Config, len(c.OverridesByLoggerName)) - for k, o := range c.OverridesByLoggerName { - overridesByLoggerName[k] = Config{ - Level: o.Level, - EnableStackTrace: o.EnableStackTrace, - EncoderFormat: o.EncoderFormat, - EnableCaller: o.EnableCaller, - DisableColor: o.DisableColor, - OutputPaths: o.OutputPaths, - Pipe: o.Pipe, - } - } - - return Config{ - Level: c.Level, - EnableStackTrace: c.EnableStackTrace, - EncoderFormat: c.EncoderFormat, - OutputPaths: c.OutputPaths, - EnableCaller: c.EnableCaller, - DisableColor: c.DisableColor, - OverridesByLoggerName: overridesByLoggerName, - Pipe: c.Pipe, - } -} - -// Create a new Config given new config options. Each updated Config field is handled. 
-func (oldConfig Config) with(newConfigOptions Config) Config { - newConfig := oldConfig.copy() - - if newConfigOptions.Level.HasValue { - newConfig.Level = newConfigOptions.Level - } - - if newConfigOptions.EnableStackTrace.HasValue { - newConfig.EnableStackTrace = newConfigOptions.EnableStackTrace - } - - if newConfigOptions.EnableCaller.HasValue { - newConfig.EnableCaller = newConfigOptions.EnableCaller - } - - if newConfigOptions.DisableColor.HasValue { - newConfig.DisableColor = newConfigOptions.DisableColor - } - - if newConfigOptions.EncoderFormat.HasValue { - newConfig.EncoderFormat = newConfigOptions.EncoderFormat - } - - if len(newConfigOptions.OutputPaths) != 0 { - newConfig.OutputPaths = validatePaths(newConfigOptions.OutputPaths) - } - - if newConfigOptions.Pipe != nil { - newConfig.Pipe = newConfigOptions.Pipe - } - - for k, o := range newConfigOptions.OverridesByLoggerName { - // We fully overwrite overrides to allow for ease of - // reset/removal (can provide empty to return to default) - newConfig.OverridesByLoggerName[k] = Config{ - Level: o.Level, - EnableStackTrace: o.EnableStackTrace, - EnableCaller: o.EnableCaller, - DisableColor: o.DisableColor, - EncoderFormat: o.EncoderFormat, - OutputPaths: validatePaths(o.OutputPaths), - Pipe: o.Pipe, - } - } - - return newConfig -} - -// validatePath ensure that all output paths are valid to avoid zap sync errors -// and also to ensure that the logs are not lost. -func validatePaths(paths []string) []string { - validatedPaths := make([]string, 0, len(paths)) - for _, p := range paths { - if p == stderr || p == stdout { - validatedPaths = append(validatedPaths, p) - continue - } - - if f, err := os.OpenFile(p, os.O_CREATE|os.O_APPEND, 0644); err != nil { - log.Info(context.Background(), "cannot use provided path", NewKV("err", err)) - } else { - err := f.Close() - if err != nil { - log.Info(context.Background(), "problem closing file", NewKV("err", err)) - } - - validatedPaths = append(validatedPaths, p) - } - } - - return validatedPaths -} - -func willOutputToStderrOrStdout(paths []string) bool { - if len(paths) == 0 { - return true - } - for _, p := range paths { - if p == stderr || p == stdout { - return true - } - } - return false -} diff --git a/logging/doc.go b/logging/doc.go deleted file mode 100644 index 2f6a0b8827..0000000000 --- a/logging/doc.go +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -/* -Package logging abstracts away any underlying logging library providing -a single contact-point for the dependency allowing relatively easy -swapping out should we want to. - -This package allows configuration to be loaded and globally applied -after logger instances have been created, utilising an internal thread-safe -registry of named logger instances to apply the config to. - -Configuration may be applied globally, or to logger instances of a specific -name, with the named-configuration being used over the global settings if -both are provided. - -All configuration options are optional. 
-*/ -package logging diff --git a/logging/logger.go b/logging/logger.go deleted file mode 100644 index f93e305fce..0000000000 --- a/logging/logger.go +++ /dev/null @@ -1,311 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package logging - -import ( - "context" - "fmt" - stdlog "log" - "os" - "sync" - - golog "github.com/ipfs/go-log" - gologV2 "github.com/ipfs/go-log/v2" - "go.uber.org/zap" - "go.uber.org/zap/zapcore" -) - -type logger struct { - name string - logger *zap.Logger - consoleLogger *stdlog.Logger - syncLock sync.RWMutex -} - -var _ Logger = (*logger)(nil) - -func mustNewLogger(name string) *logger { - l, err := buildZapLogger(name, Config{}) - if err != nil { - panic(err) - } - - return &logger{ - name: name, - logger: l, - } -} - -func (l *logger) Debug(ctx context.Context, message string, keyvals ...KV) { - l.syncLock.RLock() - defer l.syncLock.RUnlock() - - l.logger.Debug(message, toZapFields(keyvals)...) -} - -func (l *logger) Info(ctx context.Context, message string, keyvals ...KV) { - l.syncLock.RLock() - defer l.syncLock.RUnlock() - - l.logger.Info(message, toZapFields(keyvals)...) -} - -func (l *logger) Error(ctx context.Context, message string, keyvals ...KV) { - l.syncLock.RLock() - defer l.syncLock.RUnlock() - - l.logger.Error(message, toZapFields(keyvals)...) -} - -func (l *logger) ErrorE(ctx context.Context, message string, err error, keyvals ...KV) { - kvs := keyvals - kvs = append(kvs, NewKV("Error", err.Error())) - kvs = withStackTrace(err, kvs) - - l.syncLock.RLock() - defer l.syncLock.RUnlock() - - l.logger.Error(message, toZapFields(kvs)...) -} - -func (l *logger) Fatal(ctx context.Context, message string, keyvals ...KV) { - l.syncLock.RLock() - defer l.syncLock.RUnlock() - - l.logger.Fatal(message, toZapFields(keyvals)...) -} - -func (l *logger) FatalE(ctx context.Context, message string, err error, keyvals ...KV) { - kvs := keyvals - kvs = append(kvs, NewKV("Error", err.Error())) - kvs = withStackTrace(err, kvs) - - l.syncLock.RLock() - defer l.syncLock.RUnlock() - - l.logger.Fatal(message, toZapFields(kvs)...) -} - -func (l *logger) FeedbackInfo(ctx context.Context, message string, keyvals ...KV) { - l.Info(ctx, message, keyvals...) - l.syncLock.RLock() - defer l.syncLock.RUnlock() - if l.consoleLogger != nil { - l.consoleLogger.Println(message) - } -} - -func (l *logger) FeedbackError(ctx context.Context, message string, keyvals ...KV) { - l.Error(ctx, message, keyvals...) - l.syncLock.RLock() - defer l.syncLock.RUnlock() - if l.consoleLogger != nil { - l.consoleLogger.Println(message) - } -} - -func (l *logger) FeedbackErrorE(ctx context.Context, message string, err error, keyvals ...KV) { - l.ErrorE(ctx, message, err, keyvals...) - l.syncLock.RLock() - defer l.syncLock.RUnlock() - if l.consoleLogger != nil { - l.consoleLogger.Println(message) - if stack, hasStack := getStackTrace(err); hasStack { - l.consoleLogger.Println(stack) - } - } -} - -func (l *logger) FeedbackFatal(ctx context.Context, message string, keyvals ...KV) { - l.Fatal(ctx, message, keyvals...) 
- l.syncLock.RLock() - defer l.syncLock.RUnlock() - if l.consoleLogger != nil { - l.consoleLogger.Println(message) - } -} - -func (l *logger) FeedbackFatalE(ctx context.Context, message string, err error, keyvals ...KV) { - l.FatalE(ctx, message, err, keyvals...) - l.syncLock.RLock() - defer l.syncLock.RUnlock() - if l.consoleLogger != nil { - l.consoleLogger.Println(message) - if stack, hasStack := getStackTrace(err); hasStack { - l.consoleLogger.Println(stack) - } - } -} - -func (l *logger) Flush() error { - return l.logger.Sync() -} - -func toZapFields(keyvals []KV) []zap.Field { - result := make([]zap.Field, len(keyvals)) - for i, kv := range keyvals { - result[i] = zap.Any(kv.key, kv.value) - } - return result -} - -func (l *logger) ApplyConfig(config Config) { - newLogger, err := buildZapLogger(l.name, config) - if err != nil { - l.logger.Error("Error applying config to logger", zap.Error(err)) - return - } - - l.syncLock.Lock() - defer l.syncLock.Unlock() - - // We need sync the old log before swapping it out - _ = l.logger.Sync() - l.logger = newLogger - - if !willOutputToStderrOrStdout(config.OutputPaths) { - if config.Pipe != nil { // for testing purposes only - l.consoleLogger = stdlog.New(config.Pipe, "", 0) - } else { - l.consoleLogger = stdlog.New(os.Stderr, "", 0) - } - } else { - l.consoleLogger = nil - } -} - -func withStackTrace(err error, keyvals []KV) []KV { - if stack, hasStack := getStackTrace(err); hasStack { - return append(keyvals, NewKV("stacktrace", stack)) - } - - return keyvals -} - -func getStackTrace(err error) (string, bool) { - configMutex.RLock() - defer configMutex.RUnlock() - - if cachedConfig.EnableStackTrace.EnableStackTrace { - return fmt.Sprintf("%+v", err), true - } - - return "", false -} - -func buildZapLogger(name string, config Config) (*zap.Logger, error) { - const ( - encodingTypeConsole string = "console" - encodingTypeJSON string = "json" - ) - defaultConfig := zap.NewProductionConfig() - defaultConfig.Encoding = encodingTypeConsole - defaultConfig.EncoderConfig.ConsoleSeparator = ", " - defaultConfig.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder - defaultConfig.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder - defaultConfig.DisableStacktrace = true - defaultConfig.DisableCaller = true - - if config.Level.HasValue { - defaultConfig.Level = zap.NewAtomicLevelAt(zapcore.Level(config.Level.LogLevel)) - } - - if config.DisableColor.HasValue && config.DisableColor.DisableColor { - defaultConfig.EncoderConfig.EncodeLevel = zapcore.CapitalLevelEncoder - } - - if config.EnableCaller.HasValue { - defaultConfig.DisableCaller = !config.EnableCaller.EnableCaller - } - - if config.EncoderFormat.HasValue { - if config.EncoderFormat.EncoderFormat == JSON { - defaultConfig.Encoding = encodingTypeJSON - defaultConfig.EncoderConfig.EncodeLevel = zapcore.CapitalLevelEncoder - } else if config.EncoderFormat.EncoderFormat == CSV { - defaultConfig.Encoding = encodingTypeConsole - } - } - - if len(config.OutputPaths) != 0 { - defaultConfig.OutputPaths = config.OutputPaths[:] - } - - // We must skip the first caller, as this will always be our wrapper - newLogger, err := defaultConfig.Build(zap.AddCallerSkip(1)) - if err != nil { - return nil, err - } - - if willOutputToStderrOrStdout(defaultConfig.OutputPaths) && config.Pipe != nil { - newLogger = newLogger.WithOptions(zap.WrapCore(func(zapcore.Core) zapcore.Core { - cfg := zap.NewProductionEncoderConfig() - cfg.ConsoleSeparator = defaultConfig.EncoderConfig.ConsoleSeparator - cfg.EncodeTime = 
defaultConfig.EncoderConfig.EncodeTime - cfg.EncodeLevel = defaultConfig.EncoderConfig.EncodeLevel - return zapcore.NewCore( - zapcore.NewJSONEncoder(cfg), - zapcore.Lock(zapcore.AddSync(config.Pipe)), - zap.NewAtomicLevelAt(zapcore.Level(config.Level.LogLevel)), - ) - })) - } - - return newLogger.Named(name), nil -} - -/* - The following are wrappers for external packages loggers that are compatible with - our own logger (i.e. Zap based). They offer a way to access the internal logger stores - and apply our configuration. They should implement ApplyConfig. -*/ - -// goLogger is a wrapper for a go-log logger -// Used by github.com/ipfs/go-ipfs-provider -type goLogger struct { - *logger - *golog.ZapEventLogger -} - -func GetGoLogger(name string) *goLogger { - l := mustNewLogger(name) - gl := golog.Logger(name) - return &goLogger{ - logger: l, - ZapEventLogger: gl, - } -} - -func (l *goLogger) ApplyConfig(config Config) { - l.logger.ApplyConfig(config) - l.ZapEventLogger.SugaredLogger = *l.logger.logger.Sugar() -} - -// goLoggerV2 is a wrapper for a go-log V2 logger -// Used by github.com/sourcenetwork/defradb/datastore/badger/v4 -type goLoggerV2 struct { - *logger - *gologV2.ZapEventLogger -} - -func GetGoLoggerV2(name string) *goLoggerV2 { - l := mustNewLogger(name) - gl := gologV2.Logger(name) - return &goLoggerV2{ - logger: l, - ZapEventLogger: gl, - } -} - -func (l *goLoggerV2) ApplyConfig(config Config) { - l.logger.ApplyConfig(config) - l.ZapEventLogger.SugaredLogger = *l.logger.logger.Sugar() -} diff --git a/logging/logging.go b/logging/logging.go deleted file mode 100644 index 1f1883bedb..0000000000 --- a/logging/logging.go +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package logging - -import ( - "context" -) - -var log = MustNewLogger("logging") - -// KV is a key-value pair used to pass structured data to loggers. -type KV struct { - key string - value any -} - -// NewKV creates a new KV key-value pair. -func NewKV(key string, value any) KV { - return KV{ - key: key, - value: value, - } -} - -type Logger interface { - // Debug logs a message at debug log level. Key-value pairs can be added. - Debug(ctx context.Context, message string, keyvals ...KV) - // Info logs a message at info log level. Key-value pairs can be added. - Info(ctx context.Context, message string, keyvals ...KV) - // Error logs a message at error log level. Key-value pairs can be added. - Error(ctx context.Context, message string, keyvals ...KV) - // ErrorErr logs a message and an error at error log level. Key-value pairs can be added. - ErrorE(ctx context.Context, message string, err error, keyvals ...KV) - // Fatal logs a message at fatal log level. Key-value pairs can be added. - Fatal(ctx context.Context, message string, keyvals ...KV) - // FatalE logs a message and an error at fatal log level. Key-value pairs can be added. - FatalE(ctx context.Context, message string, err error, keyvals ...KV) - - // Feedback prefixed method ensure that messsages reach a user in case the logs are sent to a file. - - // FeedbackInfo calls Info and sends the message to stderr if logs are sent to a file. 
- FeedbackInfo(ctx context.Context, message string, keyvals ...KV) - // FeedbackError calls Error and sends the message to stderr if logs are sent to a file. - FeedbackError(ctx context.Context, message string, keyvals ...KV) - // FeedbackErrorE calls ErrorE and sends the message to stderr if logs are sent to a file. - FeedbackErrorE(ctx context.Context, message string, err error, keyvals ...KV) - // FeedbackFatal calls Fatal and sends the message to stderr if logs are sent to a file. - FeedbackFatal(ctx context.Context, message string, keyvals ...KV) - // FeedbackFatalE calls FatalE and sends the message to stderr if logs are sent to a file. - FeedbackFatalE(ctx context.Context, message string, err error, keyvals ...KV) - - // Flush flushes any buffered log entries. - Flush() error - // ApplyConfig updates the logger with a new config. - ApplyConfig(config Config) -} - -// MustNewLogger creates and registers a new logger with the given name, and panics if there is an error. -func MustNewLogger(name string) Logger { - logger := mustNewLogger(name) - register(name, logger) - return logger -} - -// SetConfig updates all registered loggers with the given config. -func SetConfig(newConfig Config) { - updatedConfig := setConfig(newConfig) - updateLoggers(updatedConfig) -} diff --git a/logging/logging_test.go b/logging/logging_test.go deleted file mode 100644 index 5a19cfb744..0000000000 --- a/logging/logging_test.go +++ /dev/null @@ -1,1011 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -// todo: The logger(s) appear to leak resources and do not close down promptly on windows, -// the log files have open readers when the Golang test runner attempts to delete them. -// See https://github.com/sourcenetwork/defradb/issues/2057 for more info. 
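The tests removed below exercise the logger's `Fatal` path, which ends in `os.Exit`. They use the standard Go re-exec pattern: the test spawns its own binary with a marker environment variable, and only the child process runs the fatal call, so the exit can never kill the parent test runner. A minimal sketch of that pattern as it existed inside the removed package (the test name is invented; `OS_EXIT` and `MustNewLogger` mirror the deleted code, and the imports assumed are the ones the deleted file already declares):

```go
func TestFatalExitsChildProcessSketch(t *testing.T) {
	if os.Getenv("OS_EXIT") == "1" {
		// Child process: run the fatal path, which exits with a non-zero status.
		logger := MustNewLogger("TestLogName")
		logger.Fatal(context.Background(), "test log message")
		return
	}
	// Parent process: re-exec this same test with the marker set.
	cmd := exec.Command(os.Args[0], "-test.run=TestFatalExitsChildProcessSketch")
	cmd.Env = append(os.Environ(), "OS_EXIT=1")
	err := cmd.Run()
	if e, ok := err.(*exec.ExitError); !ok || e.Success() {
		t.Fatalf("expected the child to exit with a non-zero status, got: %v", err)
	}
}
```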
- -//go:build !windows - -package logging - -import ( - "bufio" - "bytes" - "context" - "encoding/json" - "io" - "os" - "os/exec" - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/sourcenetwork/defradb/errors" -) - -func TestLogWritesFatalMessageToLogAndKillsProcess(t *testing.T) { - logMessage := "test log message" - - if os.Getenv("OS_EXIT") == "1" { - ctx := context.Background() - logPath := os.Getenv("LOG_PATH") - logger, logPath := getLogger(t, func(c *Config) { - c.OutputPaths = []string{logPath} - }) - - logger.Fatal(ctx, logMessage) - return - } - - dir := t.TempDir() - logPath := dir + "/log.txt" - cmd := exec.Command(os.Args[0], "-test.run=TestLogWritesFatalMessageToLogAndKillsProcess") - cmd.Env = append(os.Environ(), "OS_EXIT=1", "LOG_PATH="+logPath) - err := cmd.Run() - if e, ok := err.(*exec.ExitError); !ok || e.Success() { - t.Fatalf("Logger.Fatal failed to kill the process, error: %v", err) - } - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, "FATAL", logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - // caller is disabled by default - assert.NotContains(t, logLines[0], "logging_test.go") - // stacktrace is disabled by default - assert.NotContains(t, logLines[0], "stacktrace") -} - -func TestLogWritesFatalMessageWithStackTraceToLogAndKillsProcessGivenStackTraceEnabled(t *testing.T) { - logMessage := "test log message" - - if os.Getenv("OS_EXIT") == "1" { - ctx := context.Background() - logPath := os.Getenv("LOG_PATH") - logger, logPath := getLogger(t, func(c *Config) { - c.OutputPaths = []string{logPath} - c.EnableStackTrace = NewEnableStackTraceOption(true) - }) - - logger.Fatal(ctx, logMessage) - return - } - - dir := t.TempDir() - logPath := dir + "/log.txt" - cmd := exec.Command(os.Args[0], "-test.run=TestLogWritesFatalMessageWithStackTraceToLogAndKillsProcessGivenStackTraceEnabled") - cmd.Env = append(os.Environ(), "OS_EXIT=1", "LOG_PATH="+logPath) - err := cmd.Run() - if e, ok := err.(*exec.ExitError); !ok || e.Success() { - t.Fatalf("Logger.Fatal failed to kill the process, error: %v", err) - } - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, "FATAL", logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - // no stacktrace will be present since no error was sent to the logger. 
- assert.NotContains(t, logLines[0], "stacktrace") - // caller is disabled by default - assert.NotContains(t, logLines[0], "logging_test.go") -} - -func TestLogWritesFatalEMessageToLogAndKillsProcess(t *testing.T) { - logMessage := "test log message" - - if os.Getenv("OS_EXIT") == "1" { - ctx := context.Background() - logPath := os.Getenv("LOG_PATH") - logger, logPath := getLogger(t, func(c *Config) { - c.OutputPaths = []string{logPath} - }) - - logger.FatalE(ctx, logMessage, errors.New("dummy error")) - return - } - - dir := t.TempDir() - logPath := dir + "/log.txt" - cmd := exec.Command(os.Args[0], "-test.run=TestLogWritesFatalEMessageToLogAndKillsProcess") - cmd.Env = append(os.Environ(), "OS_EXIT=1", "LOG_PATH="+logPath) - err := cmd.Run() - if e, ok := err.(*exec.ExitError); !ok || e.Success() { - t.Fatalf("Logger.Fatal failed to kill the process, error: %v", err) - } - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, "FATAL", logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - // caller is disabled by default - assert.NotContains(t, logLines[0], "logging_test.go") - // stacktrace is disabled by default - assert.NotContains(t, logLines[0], "stacktrace") -} - -func TestLogWritesFatalEMessageWithStackTraceToLogAndKillsProcessGivenStackTraceEnabled(t *testing.T) { - logMessage := "test log message" - - if os.Getenv("OS_EXIT") == "1" { - ctx := context.Background() - logPath := os.Getenv("LOG_PATH") - logger, logPath := getLogger(t, func(c *Config) { - c.OutputPaths = []string{logPath} - c.EnableStackTrace = NewEnableStackTraceOption(true) - }) - - logger.FatalE(ctx, logMessage, errors.New("dummy error")) - return - } - - dir := t.TempDir() - logPath := dir + "/log.txt" - cmd := exec.Command(os.Args[0], "-test.run=TestLogWritesFatalEMessageWithStackTraceToLogAndKillsProcessGivenStackTraceEnabled") - cmd.Env = append(os.Environ(), "OS_EXIT=1", "LOG_PATH="+logPath) - err := cmd.Run() - if e, ok := err.(*exec.ExitError); !ok || e.Success() { - t.Fatalf("Logger.Fatal failed to kill the process, error: %v", err) - } - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, "FATAL", logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - assert.Contains(t, logLines[0], "stacktrace") - // caller is disabled by default - assert.NotContains(t, logLines[0], "logging_test.go") -} - -type LogLevelTestCase struct { - LogLevel LogLevel - LogFunc func(Logger, context.Context, string) - ExpectedLogLevel string - WithStackTrace bool - ExpectStackTrace bool - WithCaller bool -} - -func logDebug(l Logger, c context.Context, m string) { l.Debug(c, m) } -func logInfo(l Logger, c context.Context, m string) { l.Info(c, m) } -func logError(l Logger, c context.Context, m string) { l.Error(c, m) } -func logErrorE(l Logger, c context.Context, m string) { l.ErrorE(c, m, errors.New("test error")) } - -func getLogLevelTestCase() []LogLevelTestCase { - return []LogLevelTestCase{ - {Debug, logDebug, "DEBUG", false, false, true}, - {Debug, logDebug, "DEBUG", false, false, false}, - {Debug, logInfo, "INFO", false, false, false}, - {Debug, logError, "ERROR", false, 
false, false}, - {Debug, logError, "ERROR", true, false, false}, - {Debug, logErrorE, "ERROR", false, false, false}, - {Debug, logErrorE, "ERROR", true, true, false}, - {Info, logDebug, "", false, false, false}, - {Info, logInfo, "INFO", false, false, true}, - {Info, logInfo, "INFO", false, false, false}, - {Info, logError, "ERROR", false, false, false}, - {Info, logError, "ERROR", true, false, false}, - {Info, logErrorE, "ERROR", false, false, false}, - {Info, logErrorE, "ERROR", true, true, false}, - {Warn, logDebug, "", false, false, false}, - {Warn, logInfo, "", false, false, false}, - {Warn, logError, "ERROR", false, false, false}, - {Warn, logError, "ERROR", true, false, false}, - {Warn, logErrorE, "ERROR", false, false, false}, - {Warn, logErrorE, "ERROR", true, true, false}, - {Error, logDebug, "", false, false, false}, - {Error, logInfo, "", false, false, false}, - {Error, logError, "ERROR", false, false, true}, - {Error, logError, "ERROR", false, false, false}, - {Error, logError, "ERROR", true, false, false}, - {Error, logErrorE, "ERROR", false, false, false}, - {Error, logErrorE, "ERROR", true, true, false}, - {Fatal, logDebug, "", false, false, true}, - {Fatal, logDebug, "", false, false, false}, - {Fatal, logInfo, "", false, false, false}, - {Fatal, logError, "", false, false, false}, - {Fatal, logErrorE, "", false, false, false}, - } -} - -func TestLogWritesMessagesToLog(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - for _, tc := range getLogLevelTestCase() { - ctx := context.Background() - logger, logPath := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(tc.LogLevel) - c.EnableStackTrace = NewEnableStackTraceOption(tc.WithStackTrace) - c.EnableCaller = NewEnableCallerOption(tc.WithCaller) - }) - logMessage := "test log message" - - tc.LogFunc(logger, ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if tc.ExpectedLogLevel == "" { - assert.Len(t, logLines, 0) - } else { - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, tc.ExpectedLogLevel, logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - _, hasStackTrace := logLines[0]["stacktrace"] - assert.Equal(t, tc.ExpectStackTrace, hasStackTrace) - _, hasCaller := logLines[0]["caller"] - assert.Equal(t, tc.WithCaller, hasCaller) - } - - clearRegistry("TestLogName") - } -} - -func TestLogWritesMessagesToLogGivenUpdatedLogLevel(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - for _, tc := range getLogLevelTestCase() { - ctx := context.Background() - logger, logPath := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(Fatal) - }) - SetConfig(Config{ - Level: NewLogLevelOption(tc.LogLevel), - EnableStackTrace: NewEnableStackTraceOption(tc.WithStackTrace), - EnableCaller: NewEnableCallerOption(tc.WithCaller), - }) - logMessage := "test log message" - - tc.LogFunc(logger, ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if tc.ExpectedLogLevel == "" { - assert.Len(t, logLines, 0) - } else { - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, tc.ExpectedLogLevel, logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - _, 
hasStackTrace := logLines[0]["stacktrace"] - assert.Equal(t, tc.ExpectStackTrace, hasStackTrace) - _, hasCaller := logLines[0]["caller"] - assert.Equal(t, tc.WithCaller, hasCaller) - } - - clearRegistry("TestLogName") - } -} - -func TestLogWritesMessagesToLogGivenUpdatedContextLogLevel(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - for _, tc := range getLogLevelTestCase() { - ctx := context.Background() - logger, logPath := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(Fatal) - }) - SetConfig(Config{ - Level: NewLogLevelOption(Error), - }) - SetConfig(Config{ - Level: NewLogLevelOption(tc.LogLevel), - EnableStackTrace: NewEnableStackTraceOption(tc.WithStackTrace), - EnableCaller: NewEnableCallerOption(tc.WithCaller), - }) - logMessage := "test log message" - - tc.LogFunc(logger, ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if tc.ExpectedLogLevel == "" { - assert.Len(t, logLines, 0) - } else { - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, tc.ExpectedLogLevel, logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - _, hasStackTrace := logLines[0]["stacktrace"] - assert.Equal(t, tc.ExpectStackTrace, hasStackTrace) - _, hasCaller := logLines[0]["caller"] - assert.Equal(t, tc.WithCaller, hasCaller) - } - - clearRegistry("TestLogName") - } -} - -func TestLogDoesntWriteMessagesToLogGivenNoLogPath(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - for _, tc := range getLogLevelTestCase() { - ctx := context.Background() - b := &bytes.Buffer{} - logger, _ := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(tc.LogLevel) - c.OutputPaths = []string{} - c.Pipe = b - }) - - logMessage := "test log message" - - tc.LogFunc(logger, ctx, logMessage) - logger.Flush() - - logLines, err := parseLines(b) - if err != nil { - t.Fatal(err) - } - - if tc.ExpectedLogLevel == "" { - assert.Len(t, logLines, 0) - } else { - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, tc.ExpectedLogLevel, logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - } - - clearRegistry("TestLogName") - } -} - -func TestLogDoesntWriteMessagesToLogGivenNotFoundLogPath(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - for _, tc := range getLogLevelTestCase() { - ctx := context.Background() - b := &bytes.Buffer{} - logger, _ := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(tc.LogLevel) - c.OutputPaths = []string{"/path/not/found"} - c.Pipe = b - }) - - logMessage := "test log message" - - tc.LogFunc(logger, ctx, logMessage) - logger.Flush() - - logLines, err := parseLines(b) - if err != nil { - t.Fatal(err) - } - - if tc.ExpectedLogLevel == "" { - assert.Len(t, logLines, 0) - } else { - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, tc.ExpectedLogLevel, logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - } - - clearRegistry("TestLogName") - } -} - -func TestLogDoesntWriteMessagesToLogGivenStderrLogPath(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - for _, tc := range 
getLogLevelTestCase() { - ctx := context.Background() - b := &bytes.Buffer{} - logger, _ := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(tc.LogLevel) - c.OutputPaths = []string{stderr} - c.Pipe = b - }) - - logMessage := "test log message" - - tc.LogFunc(logger, ctx, logMessage) - logger.Flush() - - logLines, err := parseLines(b) - if err != nil { - t.Fatal(err) - } - - if tc.ExpectedLogLevel == "" { - assert.Len(t, logLines, 0) - } else { - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, tc.ExpectedLogLevel, logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - } - - clearRegistry("TestLogName") - } -} - -func TestLogWritesMessagesToLogGivenUpdatedLogPath(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - for _, tc := range getLogLevelTestCase() { - ctx := context.Background() - logger, _ := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(tc.LogLevel) - c.OutputPaths = []string{} - }) - - dir := t.TempDir() - logPath := dir + "/log.txt" - SetConfig(Config{ - OutputPaths: []string{logPath}, - }) - logMessage := "test log message" - - tc.LogFunc(logger, ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if tc.ExpectedLogLevel == "" { - assert.Len(t, logLines, 0) - } else { - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, tc.ExpectedLogLevel, logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - } - - clearRegistry("TestLogName") - } -} - -func logFeedbackInfo(l Logger, c context.Context, m string) { l.FeedbackInfo(c, m) } -func logFeedbackError(l Logger, c context.Context, m string) { l.FeedbackError(c, m) } -func logFeedbackErrorE(l Logger, c context.Context, m string) { - l.FeedbackErrorE(c, m, errors.New("test error")) -} - -func getFeedbackLogLevelTestCase() []LogLevelTestCase { - return []LogLevelTestCase{ - {Debug, logFeedbackInfo, "INFO", false, false, false}, - {Debug, logFeedbackError, "ERROR", false, false, false}, - {Debug, logFeedbackError, "ERROR", true, false, false}, - {Debug, logFeedbackErrorE, "ERROR", false, false, false}, - {Debug, logFeedbackErrorE, "ERROR", true, true, false}, - {Info, logFeedbackInfo, "INFO", false, false, true}, - {Info, logFeedbackInfo, "INFO", false, false, false}, - {Info, logFeedbackError, "ERROR", false, false, false}, - {Info, logFeedbackError, "ERROR", true, false, false}, - {Info, logFeedbackErrorE, "ERROR", false, false, false}, - {Info, logFeedbackErrorE, "ERROR", true, true, false}, - {Warn, logFeedbackInfo, "", false, false, false}, - {Warn, logFeedbackError, "ERROR", false, false, false}, - {Warn, logFeedbackError, "ERROR", true, false, false}, - {Warn, logFeedbackErrorE, "ERROR", false, false, false}, - {Warn, logFeedbackErrorE, "ERROR", true, true, false}, - {Error, logFeedbackInfo, "", false, false, false}, - {Error, logFeedbackError, "ERROR", false, false, true}, - {Error, logFeedbackError, "ERROR", false, false, false}, - {Error, logFeedbackError, "ERROR", true, false, false}, - {Error, logFeedbackErrorE, "ERROR", false, false, false}, - {Error, logFeedbackErrorE, "ERROR", true, true, false}, - {Fatal, logFeedbackInfo, "", false, false, false}, - {Fatal, logFeedbackError, "", false, false, false}, - {Fatal, 
logFeedbackErrorE, "", false, false, false}, - } -} - -func TestLogWritesMessagesToFeedbackLog(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - for i, tc := range getFeedbackLogLevelTestCase() { - ctx := context.Background() - b := &bytes.Buffer{} - logger, logPath := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(tc.LogLevel) - c.EnableStackTrace = NewEnableStackTraceOption(tc.WithStackTrace) - c.EnableCaller = NewEnableCallerOption(tc.WithCaller) - c.Pipe = b - }) - logMessage := "test log message" - - tc.LogFunc(logger, ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if tc.ExpectedLogLevel == "" { - assert.Len(t, logLines, 0) - } else { - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines for tc %d", len(logLines), i) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, tc.ExpectedLogLevel, logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - _, hasStackTrace := logLines[0]["stacktrace"] - assert.Equal(t, tc.ExpectStackTrace, hasStackTrace) - _, hasCaller := logLines[0]["caller"] - assert.Equal(t, tc.WithCaller, hasCaller) - } - - if tc.ExpectStackTrace { - assert.Contains(t, b.String(), logMessage+"\ntest error. Stack:") - } else { - assert.Equal(t, logMessage+"\n", b.String()) - } - - clearRegistry("TestLogName") - } -} - -func TestLogWritesMessagesToLogGivenPipeWithValidPath(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - ctx := context.Background() - b := &bytes.Buffer{} - logger, logPath := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(Info) - c.Pipe = b - }) - logMessage := "test log message" - - logger.Info(ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, "INFO", logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - // caller is disabled by default - assert.NotContains(t, logLines[0], "logging_test.go") -} - -func TestLogDoesNotWriteMessagesToLogGivenOverrideForAnotherLoggerReducingLogLevel(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - ctx := context.Background() - logger, logPath := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(Fatal) - c.OverridesByLoggerName = map[string]Config{ - "not this logger": {Level: NewLogLevelOption(Info)}, - } - }) - logMessage := "test log message" - - logger.Info(ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - assert.Len(t, logLines, 0) -} - -func TestLogWritesMessagesToLogGivenOverrideForLoggerReducingLogLevel(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - ctx := context.Background() - logger, logPath := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(Fatal) - c.OverridesByLoggerName = map[string]Config{ - "TestLogName": {Level: NewLogLevelOption(Info)}, - } - }) - logMessage := "test log message" - - logger.Info(ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - 
assert.Equal(t, "INFO", logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - // caller is disabled by default - assert.NotContains(t, logLines[0], "logging_test.go") -} - -func TestLogWritesMessagesToLogGivenOverrideForLoggerRaisingLogLevel(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - ctx := context.Background() - logger, logPath := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(Info) - c.OverridesByLoggerName = map[string]Config{ - "not this logger": {Level: NewLogLevelOption(Fatal)}, - } - }) - logMessage := "test log message" - - logger.Info(ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, "INFO", logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - // caller is disabled by default - assert.NotContains(t, logLines[0], "logging_test.go") -} - -func TestLogDoesNotWriteMessagesToLogGivenOverrideForLoggerRaisingLogLevel(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - ctx := context.Background() - logger, logPath := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(Info) - c.OverridesByLoggerName = map[string]Config{ - "TestLogName": {Level: NewLogLevelOption(Fatal)}, - } - }) - logMessage := "test log message" - - logger.Info(ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - assert.Len(t, logLines, 0) -} - -func TestLogDoesNotWriteMessagesToLogGivenOverrideUpdatedForAnotherLoggerReducingLogLevel(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - ctx := context.Background() - logger, logPath := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(Fatal) - }) - SetConfig(Config{ - OverridesByLoggerName: map[string]Config{ - "not this logger": {Level: NewLogLevelOption(Info)}, - }, - }) - logMessage := "test log message" - - logger.Info(ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - assert.Len(t, logLines, 0) -} - -func TestLogWritesMessagesToLogGivenOverrideUpdatedForLoggerReducingLogLevel(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - ctx := context.Background() - logger, logPath := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(Fatal) - }) - SetConfig(Config{ - OverridesByLoggerName: map[string]Config{ - "TestLogName": {Level: NewLogLevelOption(Info)}, - }, - }) - logMessage := "test log message" - - logger.Info(ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, "INFO", logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - // caller is disabled by default - assert.NotContains(t, logLines[0], "logging_test.go") -} - -func TestLogWritesMessagesToLogGivenOverrideUpdatedForAnotherLoggerRaisingLogLevel(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - ctx := context.Background() - logger, logPath := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(Info) - }) - SetConfig(Config{ - 
OverridesByLoggerName: map[string]Config{ - "not this logger": {Level: NewLogLevelOption(Fatal)}, - }, - }) - logMessage := "test log message" - - logger.Info(ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, "INFO", logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - // caller is disabled by default - assert.NotContains(t, logLines[0], "logging_test.go") -} - -func TestLogDoesNotWriteMessagesToLogGivenOverrideUpdatedForLoggerRaisingLogLevel(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - ctx := context.Background() - logger, logPath := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(Info) - }) - SetConfig(Config{ - OverridesByLoggerName: map[string]Config{ - "TestLogName": {Level: NewLogLevelOption(Fatal)}, - }, - }) - logMessage := "test log message" - - logger.Info(ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - assert.Len(t, logLines, 0) -} - -func TestGetGoLogger(t *testing.T) { - l := GetGoLogger("TestLogName") - assert.NotNil(t, l.ZapEventLogger) - assert.NotNil(t, l.logger) -} - -func TestGetGoLoggerAndApplyConfig(t *testing.T) { - l := GetGoLogger("TestLogName") - assert.NotNil(t, l.ZapEventLogger) - assert.NotNil(t, l.logger) - - b := &bytes.Buffer{} - l.ApplyConfig(Config{ - EncoderFormat: NewEncoderFormatOption(JSON), - Pipe: b, - }) - - l.ZapEventLogger.Info("some info") - - logLines, err := parseLines(b) - if err != nil { - t.Fatal(err) - } - - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - assert.Equal(t, "some info", logLines[0]["msg"]) - assert.Equal(t, "INFO", logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) -} - -func TestGetGoLoggerV2(t *testing.T) { - l := GetGoLoggerV2("TestLogName") - assert.NotNil(t, l.ZapEventLogger) - assert.NotNil(t, l.logger) -} - -func TestGetGoLoggerV2AndApplyConfig(t *testing.T) { - l := GetGoLoggerV2("TestLogName") - assert.NotNil(t, l.ZapEventLogger) - assert.NotNil(t, l.logger) - - b := &bytes.Buffer{} - l.ApplyConfig(Config{ - EncoderFormat: NewEncoderFormatOption(JSON), - Pipe: b, - }) - - l.ZapEventLogger.Info("some info") - - logLines, err := parseLines(b) - if err != nil { - t.Fatal(err) - } - - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - assert.Equal(t, "some info", logLines[0]["msg"]) - assert.Equal(t, "INFO", logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) -} - -type Option = func(*Config) - -func getLogger(t *testing.T, options ...Option) (Logger, string) { - dir := t.TempDir() - logPath := dir + "/log.txt" - name := "TestLogName" - logConfig := Config{ - EncoderFormat: NewEncoderFormatOption(JSON), - OutputPaths: []string{logPath}, - } - - for _, o := range options { - o(&logConfig) - } - - logger := MustNewLogger(name) - SetConfig(logConfig) - return logger, getFirstOutputPath(logConfig.OutputPaths) -} - -func getFirstOutputPath(outputPaths []string) string { - if len(outputPaths) == 0 { - return stderr - } - return outputPaths[0] -} - -var errloggingToConsole = errors.New("no file to open. 
Logging to console") - -func getLogLines(t *testing.T, logPath string) ([]map[string]any, error) { - if logPath == stderr { - return nil, errloggingToConsole - } - - file, err := os.Open(logPath) - if err != nil { - return nil, err - } - defer func() { - err := file.Close() - if err != nil { - t.Error(err) - } - }() - - return parseLines(file) -} - -func parseLines(r io.Reader) ([]map[string]any, error) { - fileScanner := bufio.NewScanner(r) - - fileScanner.Split(bufio.ScanLines) - - logLines := []map[string]any{} - for fileScanner.Scan() { - loggedLine := make(map[string]any) - err := json.Unmarshal(fileScanner.Bytes(), &loggedLine) - if err != nil { - return nil, err - } - logLines = append(logLines, loggedLine) - } - - return logLines, nil -} - -func clearRegistry(name string) { - for _, logger := range registry[name] { - logger.Flush() - } - registry[name] = []Logger{} -} - -func clearConfig() { - configMutex.Lock() - defer configMutex.Unlock() - - cachedConfig = Config{} -} diff --git a/logging/registry.go b/logging/registry.go deleted file mode 100644 index 9410498a72..0000000000 --- a/logging/registry.go +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package logging - -import ( - "sync" -) - -var configMutex sync.RWMutex -var cachedConfig Config - -var registryMutex sync.Mutex -var registry = map[string][]Logger{ - "reprovider.simple": {GetGoLogger("reprovider.simple")}, - "badger": {GetGoLoggerV2("badger")}, -} - -func register(name string, logger Logger) { - registryMutex.Lock() - defer registryMutex.Unlock() - - loggers, exists := registry[name] - if !exists { - loggers = []Logger{} - } - loggers = append(loggers, logger) - registry[name] = loggers -} - -func setConfig(newConfig Config) Config { - configMutex.Lock() - defer configMutex.Unlock() - - cachedConfig = cachedConfig.with(newConfig) - return cachedConfig -} - -func updateLoggers(config Config) { - registryMutex.Lock() - defer registryMutex.Unlock() - - for loggerName, loggers := range registry { - newLoggerConfig := config.forLogger(loggerName) - - for _, logger := range loggers { - logger.ApplyConfig(newLoggerConfig) - } - } -} diff --git a/merkle/clock/clock.go b/merkle/clock/clock.go index 2bdc9fda93..3f1ae47cf6 100644 --- a/merkle/clock/clock.go +++ b/merkle/clock/clock.go @@ -19,13 +19,14 @@ import ( cid "github.com/ipfs/go-cid" ipld "github.com/ipfs/go-ipld-format" + "github.com/sourcenetwork/corelog" + "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" - "github.com/sourcenetwork/defradb/logging" ) var ( - log = logging.MustNewLogger("merkleclock") + log = corelog.NewLogger("merkleclock") ) // MerkleClock is a MerkleCRDT clock that can be used to read/write events (deltas) to the clock. 
@@ -121,7 +122,6 @@ func (mc *MerkleClock) ProcessNode( nodeCid := node.Cid() priority := delta.GetPriority() - log.Debug(ctx, "Running ProcessNode", logging.NewKV("CID", nodeCid)) err := mc.crdt.Merge(ctx, delta) if err != nil { return NewErrMergingDelta(nodeCid, err) @@ -130,16 +130,13 @@ func (mc *MerkleClock) ProcessNode( links := node.Links() // check if we have any HEAD links hasHeads := false - log.Debug(ctx, "Stepping through node links") for _, l := range links { - log.Debug(ctx, "Checking link", logging.NewKV("Name", l.Name), logging.NewKV("CID", l.Cid)) if l.Name == "_head" { hasHeads = true break } } if !hasHeads { // reached the bottom, at a leaf - log.Debug(ctx, "No heads found") err := mc.headset.Write(ctx, nodeCid, priority) if err != nil { return NewErrAddingHead(nodeCid, err) @@ -148,14 +145,12 @@ func (mc *MerkleClock) ProcessNode( for _, l := range links { linkCid := l.Cid - log.Debug(ctx, "Scanning for replacement heads", logging.NewKV("Child", linkCid)) isHead, err := mc.headset.IsHead(ctx, linkCid) if err != nil { return NewErrCheckingHead(linkCid, err) } if isHead { - log.Debug(ctx, "Found head, replacing!") // reached one of the current heads, replace it with the tip // of current branch err = mc.headset.Replace(ctx, linkCid, nodeCid, priority) @@ -173,14 +168,13 @@ func (mc *MerkleClock) ProcessNode( if known { // we reached a non-head node in the known tree. // This means our root block is a new head - log.Debug(ctx, "Adding head") err := mc.headset.Write(ctx, nodeCid, priority) if err != nil { - log.ErrorE( + log.ErrorContextE( ctx, "Failure adding head (when root is a new head)", err, - logging.NewKV("Root", nodeCid), + corelog.Any("Root", nodeCid), ) // OR should this also return like below comment?? // return nil, errors.Wrap("error adding head (when root is new head): %s ", root, err) diff --git a/merkle/clock/heads.go b/merkle/clock/heads.go index cafc7cb6fa..2bbb04d2d9 100644 --- a/merkle/clock/heads.go +++ b/merkle/clock/heads.go @@ -18,10 +18,10 @@ import ( cid "github.com/ipfs/go-cid" "github.com/ipfs/go-datastore/query" + "github.com/sourcenetwork/corelog" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" - "github.com/sourcenetwork/defradb/logging" ) // heads manages the current Merkle-CRDT heads. @@ -55,12 +55,12 @@ func (hh *heads) IsHead(ctx context.Context, c cid.Cid) (bool, error) { // Replace replaces a head with a new CID. 
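Where `logging.NewKV` boxed every value as `any`, corelog distinguishes typed attribute constructors from the `Any` fallback. The rewritten `Replace` call below picks `corelog.Uint64` for the height, whose static type is known at the call site, and keeps `corelog.Any` for the `cid.Cid` arguments. Annotated for emphasis (the annotations are editorial; the calls are taken from the hunk):

```go
log.InfoContext(ctx, "Replacing DAG head",
	corelog.Any("Old", old),          // cid.Cid: no dedicated constructor used
	corelog.Any("CID", new),
	corelog.Uint64("Height", height)) // uint64: typed constructor
```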
func (hh *heads) Replace(ctx context.Context, old cid.Cid, new cid.Cid, height uint64) error { - log.Info( + log.InfoContext( ctx, "Replacing DAG head", - logging.NewKV("Old", old), - logging.NewKV("CID", new), - logging.NewKV("Height", height)) + corelog.Any("Old", old), + corelog.Any("CID", new), + corelog.Uint64("Height", height)) err := hh.store.Delete(ctx, hh.key(old).ToDS()) if err != nil { @@ -91,7 +91,7 @@ func (hh *heads) List(ctx context.Context) ([]cid.Cid, uint64, error) { defer func() { err := results.Close() if err != nil { - log.ErrorE(ctx, "Error closing results", err) + log.ErrorContextE(ctx, "Error closing results", err) } }() diff --git a/merkle/crdt/composite.go b/merkle/crdt/composite.go index ee43348bdc..f58813235a 100644 --- a/merkle/crdt/composite.go +++ b/merkle/crdt/composite.go @@ -59,7 +59,6 @@ func (m *MerkleCompositeDAG) Delete( ) (ipld.Node, uint64, error) { // Set() call on underlying CompositeDAG CRDT // persist/publish delta - log.Debug(ctx, "Applying delta-mutator 'Delete' on CompositeDAG") delta := m.reg.Set(links) delta.Status = client.Deleted nd, err := m.clock.AddDAGNode(ctx, delta) @@ -78,7 +77,6 @@ func (m *MerkleCompositeDAG) Save(ctx context.Context, data any) (ipld.Node, uin } // Set() call on underlying CompositeDAG CRDT // persist/publish delta - log.Debug(ctx, "Applying delta-mutator 'Set' on CompositeDAG") delta := m.reg.Set(value) nd, err := m.clock.AddDAGNode(ctx, delta) if err != nil { diff --git a/merkle/crdt/pncounter.go b/merkle/crdt/counter.go similarity index 55% rename from merkle/crdt/pncounter.go rename to merkle/crdt/counter.go index 74b7adb156..6ca016cea6 100644 --- a/merkle/crdt/pncounter.go +++ b/merkle/crdt/counter.go @@ -1,4 +1,4 @@ -// Copyright 2023 Democratized Data Foundation +// Copyright 2024 Democratized Data Foundation // // Use of this software is governed by the Business Source License // included in the file licenses/BSL.txt. @@ -21,40 +21,41 @@ import ( "github.com/sourcenetwork/defradb/merkle/clock" ) -// MerklePNCounter is a MerkleCRDT implementation of the PNCounter using MerkleClocks. -type MerklePNCounter[T crdt.Incrementable] struct { +// MerkleCounter is a MerkleCRDT implementation of the Counter using MerkleClocks. +type MerkleCounter[T crdt.Incrementable] struct { *baseMerkleCRDT - reg crdt.PNCounter[T] + reg crdt.Counter[T] } -// NewMerklePNCounter creates a new instance (or loaded from DB) of a MerkleCRDT -// backed by a PNCounter CRDT. -func NewMerklePNCounter[T crdt.Incrementable]( +// NewMerkleCounter creates a new instance (or loaded from DB) of a MerkleCRDT +// backed by a Counter CRDT. +func NewMerkleCounter[T crdt.Incrementable]( store Stores, schemaVersionKey core.CollectionSchemaVersionKey, key core.DataStoreKey, fieldName string, -) *MerklePNCounter[T] { - register := crdt.NewPNCounter[T](store.Datastore(), schemaVersionKey, key, fieldName) + allowDecrement bool, +) *MerkleCounter[T] { + register := crdt.NewCounter[T](store.Datastore(), schemaVersionKey, key, fieldName, allowDecrement) clk := clock.NewMerkleClock(store.Headstore(), store.DAGstore(), key.ToHeadStoreKey(), register) base := &baseMerkleCRDT{clock: clk, crdt: register} - return &MerklePNCounter[T]{ + return &MerkleCounter[T]{ baseMerkleCRDT: base, reg: register, } } -// Save the value of the PN Counter to the DAG. -func (mPNC *MerklePNCounter[T]) Save(ctx context.Context, data any) (ipld.Node, uint64, error) { +// Save the value of the Counter to the DAG. 
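With `MerklePNCounter` generalized into `MerkleCounter`, the counter variant is now selected by the trailing `allowDecrement` argument rather than by a separate type; `InstanceWithStore` (in the next file) passes `cType == client.PN_COUNTER`, so only PN counters may decrement, with the actual enforcement living in `crdt.NewCounter`, which is outside this diff. A construction sketch under that assumption (the field names are invented for illustration):

```go
// Positive-only counter (P_COUNTER semantics): decrements rejected downstream.
upvotes := NewMerkleCounter[int64](store, schemaVersionKey, key, "upvotes", false)

// Full PN counter: the same implementation with decrements allowed.
balance := NewMerkleCounter[int64](store, schemaVersionKey, key, "balance", true)
```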
+func (mc *MerkleCounter[T]) Save(ctx context.Context, data any) (ipld.Node, uint64, error) { value, ok := data.(*client.FieldValue) if !ok { - return nil, 0, NewErrUnexpectedValueType(client.PN_COUNTER, &client.FieldValue{}, data) + return nil, 0, NewErrUnexpectedValueType(mc.reg.CType(), &client.FieldValue{}, data) } - delta, err := mPNC.reg.Increment(ctx, value.Value().(T)) + delta, err := mc.reg.Increment(ctx, value.Value().(T)) if err != nil { return nil, 0, err } - nd, err := mPNC.clock.AddDAGNode(ctx, delta) + nd, err := mc.clock.AddDAGNode(ctx, delta) return nd, delta.GetPriority(), err } diff --git a/merkle/crdt/merklecrdt.go b/merkle/crdt/merklecrdt.go index b52fb7cf6d..5bd95c86cd 100644 --- a/merkle/crdt/merklecrdt.go +++ b/merkle/crdt/merklecrdt.go @@ -21,11 +21,6 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" - "github.com/sourcenetwork/defradb/logging" -) - -var ( - log = logging.MustNewLogger("merklecrdt") ) type Stores interface { @@ -71,12 +66,12 @@ func (base *baseMerkleCRDT) Value(ctx context.Context) ([]byte, error) { func InstanceWithStore( store Stores, schemaVersionKey core.CollectionSchemaVersionKey, - ctype client.CType, + cType client.CType, kind client.FieldKind, key core.DataStoreKey, fieldName string, ) (MerkleCRDT, error) { - switch ctype { + switch cType { case client.LWW_REGISTER: return NewMerkleLWWRegister( store, @@ -84,21 +79,23 @@ func InstanceWithStore( key, fieldName, ), nil - case client.PN_COUNTER: + case client.PN_COUNTER, client.P_COUNTER: switch kind { case client.FieldKind_NILLABLE_INT: - return NewMerklePNCounter[int64]( + return NewMerkleCounter[int64]( store, schemaVersionKey, key, fieldName, + cType == client.PN_COUNTER, ), nil case client.FieldKind_NILLABLE_FLOAT: - return NewMerklePNCounter[float64]( + return NewMerkleCounter[float64]( store, schemaVersionKey, key, fieldName, + cType == client.PN_COUNTER, ), nil } case client.COMPOSITE: @@ -109,5 +106,5 @@ func InstanceWithStore( fieldName, ), nil } - return nil, client.NewErrUnknownCRDT(ctype) + return nil, client.NewErrUnknownCRDT(cType) } diff --git a/net/client.go b/net/client.go index 20c33e33fd..414ee62e47 100644 --- a/net/client.go +++ b/net/client.go @@ -20,7 +20,6 @@ import ( "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/events" - "github.com/sourcenetwork/defradb/logging" pb "github.com/sourcenetwork/defradb/net/pb" ) @@ -33,13 +32,6 @@ var ( // pushLog creates a pushLog request and sends it to another node // over libp2p grpc connection func (s *server) pushLog(ctx context.Context, evt events.Update, pid peer.ID) error { - log.Debug( - ctx, - "Preparing pushLog request", - logging.NewKV("DocID", evt.DocID), - logging.NewKV("CID", evt.Cid), - logging.NewKV("SchemaRoot", evt.SchemaRoot)) - body := &pb.PushLogRequest_Body{ DocID: []byte(evt.DocID), Cid: evt.Cid.Bytes(), @@ -53,13 +45,6 @@ func (s *server) pushLog(ctx context.Context, evt events.Update, pid peer.ID) er Body: body, } - log.Debug( - ctx, "Pushing log", - logging.NewKV("DocID", evt.DocID), - logging.NewKV("CID", evt.Cid), - logging.NewKV("PeerID", pid), - ) - client, err := s.dial(pid) // grpc dial over P2P stream if err != nil { return NewErrPushLog(err) diff --git a/net/client_test.go b/net/client_test.go index 89c26e06b5..bedd28437d 100644 --- a/net/client_test.go +++ b/net/client_test.go @@ -22,13 +22,23 @@ import ( "github.com/sourcenetwork/defradb/events" ) -var sd = client.SchemaDescription{ 
- Name: "test", - Fields: []client.SchemaFieldDescription{ - { - Name: "test", - Kind: client.FieldKind_NILLABLE_STRING, - Typ: client.LWW_REGISTER, +var def = client.CollectionDefinition{ + Description: client.CollectionDescription{ + Fields: []client.CollectionFieldDescription{ + { + ID: 1, + Name: "test", + }, + }, + }, + Schema: client.SchemaDescription{ + Name: "test", + Fields: []client.SchemaFieldDescription{ + { + Name: "test", + Kind: client.FieldKind_NILLABLE_STRING, + Typ: client.LWW_REGISTER, + }, }, }, } @@ -38,7 +48,7 @@ func TestPushlogWithDialFailure(t *testing.T) { _, n := newTestNode(ctx, t) defer n.Close() - doc, err := client.NewDocFromJSON([]byte(`{"test": "test"}`), sd) + doc, err := client.NewDocFromJSON([]byte(`{"test": "test"}`), def) require.NoError(t, err) id, err := doc.GenerateDocID() require.NoError(t, err) @@ -67,7 +77,7 @@ func TestPushlogWithInvalidPeerID(t *testing.T) { _, n := newTestNode(ctx, t) defer n.Close() - doc, err := client.NewDocFromJSON([]byte(`{"test": "test"}`), sd) + doc, err := client.NewDocFromJSON([]byte(`{"test": "test"}`), def) require.NoError(t, err) id, err := doc.GenerateDocID() require.NoError(t, err) @@ -110,7 +120,7 @@ func TestPushlogW_WithValidPeerID_NoError(t *testing.T) { col, err := n1.db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "test"}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "test"}`), col.Definition()) require.NoError(t, err) err = col.Save(ctx, doc) diff --git a/net/dag.go b/net/dag.go index f083904915..cc20629c0f 100644 --- a/net/dag.go +++ b/net/dag.go @@ -19,8 +19,7 @@ import ( "github.com/ipfs/go-cid" ipld "github.com/ipfs/go-ipld-format" - - "github.com/sourcenetwork/defradb/logging" + "github.com/sourcenetwork/corelog" ) var ( @@ -100,13 +99,6 @@ func (p *Peer) sendJobWorker() { // initialization in New(). 
func (p *Peer) dagWorker(jobs chan *dagJob) { for job := range jobs { - log.Debug( - p.ctx, - "Starting new job from DAG queue", - logging.NewKV("Datastore Key", job.bp.dsKey), - logging.NewKV("CID", job.cid), - ) - select { case <-p.ctx.Done(): // drain jobs from queue when we are done @@ -119,7 +111,11 @@ func (p *Peer) dagWorker(jobs chan *dagJob) { if j.bp.getter != nil && j.cid.Defined() { cNode, err := j.bp.getter.Get(p.ctx, j.cid) if err != nil { - log.ErrorE(p.ctx, "Failed to get node", err, logging.NewKV("CID", j.cid)) + log.ErrorContextE( + p.ctx, + "Failed to get node", + err, + corelog.Any("CID", j.cid)) j.session.Done() return } @@ -130,7 +126,11 @@ func (p *Peer) dagWorker(jobs chan *dagJob) { j.isComposite, ) if err != nil { - log.ErrorE(p.ctx, "Failed to process remote block", err, logging.NewKV("CID", j.cid)) + log.ErrorContextE( + p.ctx, + "Failed to process remote block", + err, + corelog.Any("CID", j.cid)) } } p.queuedChildren.Remove(j.cid) diff --git a/net/dag_test.go b/net/dag_test.go index 524847bfb8..2072122b2d 100644 --- a/net/dag_test.go +++ b/net/dag_test.go @@ -63,7 +63,7 @@ func TestSendJobWorker_WithNewJob_NoError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) dsKey := core.DataStoreKeyFromDocID(doc.ID()) @@ -107,7 +107,7 @@ func TestSendJobWorker_WithCloseJob_NoError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) dsKey := core.DataStoreKeyFromDocID(doc.ID()) @@ -168,7 +168,7 @@ func TestSendJobWorker_WithPeer_NoError(t *testing.T) { col, err := db1.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) dsKey := core.DataStoreKeyFromDocID(doc.ID()) @@ -188,7 +188,6 @@ func TestSendJobWorker_WithPeer_NoError(t *testing.T) { var getter ipld.NodeGetter = n2.Peer.newDAGSyncerTxn(txn2) if sessionMaker, ok := getter.(SessionDAGSyncer); ok { - log.Debug(ctx, "Upgrading DAGSyncer with a session") getter = sessionMaker.Session(ctx) } diff --git a/net/errors.go b/net/errors.go index 1ca2d857d5..773eb8765d 100644 --- a/net/errors.go +++ b/net/errors.go @@ -29,12 +29,15 @@ const ( ) var ( - ErrPeerConnectionWaitTimout = errors.New("waiting for peer connection timed out") - ErrPubSubWaitTimeout = errors.New("waiting for pubsub timed out") - ErrPushLogWaitTimeout = errors.New("waiting for pushlog timed out") - ErrNilDB = errors.New("database object can't be nil") - ErrNilUpdateChannel = errors.New("tried to subscribe to update channel, but update channel is nil") - ErrSelfTargetForReplicator = errors.New("can't target ourselves as a replicator") + ErrP2PColHasPolicy = errors.New("p2p collection specified has a policy on it") + ErrReplicatorColHasPolicy = errors.New("replicator collection specified has a policy on it") + ErrReplicatorSomeColsHavePolicy = errors.New("replicator can not use all collections, as some have policy") + ErrPeerConnectionWaitTimout = errors.New("waiting for peer 
connection timed out") + ErrPubSubWaitTimeout = errors.New("waiting for pubsub timed out") + ErrPushLogWaitTimeout = errors.New("waiting for pushlog timed out") + ErrNilDB = errors.New("database object can't be nil") + ErrNilUpdateChannel = errors.New("tried to subscribe to update channel, but update channel is nil") + ErrSelfTargetForReplicator = errors.New("can't target ourselves as a replicator") ) func NewErrPushLog(inner error, kv ...errors.KV) error { diff --git a/net/net.go b/net/net.go index add509a709..c7dbaf1810 100644 --- a/net/net.go +++ b/net/net.go @@ -12,10 +12,8 @@ package net -import ( - "github.com/sourcenetwork/defradb/logging" -) +import "github.com/sourcenetwork/corelog" var ( - log = logging.MustNewLogger("net") + log = corelog.NewLogger("net") ) diff --git a/net/node.go b/net/node.go index 9245f78772..a52e296712 100644 --- a/net/node.go +++ b/net/node.go @@ -38,6 +38,7 @@ import ( "github.com/libp2p/go-libp2p/core/routing" "github.com/multiformats/go-multiaddr" + "github.com/sourcenetwork/corelog" "github.com/sourcenetwork/go-libp2p-pubsub-rpc/finalizer" // @TODO: https://github.com/sourcenetwork/defradb/issues/1902 @@ -46,7 +47,6 @@ import ( "github.com/libp2p/go-libp2p/p2p/net/connmgr" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/logging" ) var evtWaitTimeout = 10 * time.Second @@ -144,11 +144,11 @@ func NewNode( if err != nil { return nil, fin.Cleanup(err) } - log.Info( + log.InfoContext( ctx, "Created LibP2P host", - logging.NewKV("PeerId", h.ID()), - logging.NewKV("Address", options.ListenAddresses), + corelog.Any("PeerId", h.ID()), + corelog.Any("Address", options.ListenAddresses), ) var ps *pubsub.PubSub @@ -214,10 +214,10 @@ func (n *Node) Bootstrap(addrs []peer.AddrInfo) { defer wg.Done() err := n.host.Connect(n.ctx, pinfo) if err != nil { - log.Info(n.ctx, "Cannot connect to peer", logging.NewKV("Error", err)) + log.InfoContext(n.ctx, "Cannot connect to peer", corelog.Any("Error", err)) return } - log.Info(n.ctx, "Connected", logging.NewKV("PeerID", pinfo.ID)) + log.InfoContext(n.ctx, "Connected", corelog.Any("PeerID", pinfo.ID)) atomic.AddUint64(&connected, 1) }(pinfo) } @@ -225,12 +225,12 @@ func (n *Node) Bootstrap(addrs []peer.AddrInfo) { wg.Wait() if nPeers := len(addrs); int(connected) < nPeers/2 { - log.Info(n.ctx, fmt.Sprintf("Only connected to %d bootstrap peers out of %d", connected, nPeers)) + log.InfoContext(n.ctx, fmt.Sprintf("Only connected to %d bootstrap peers out of %d", connected, nPeers)) } err := n.dht.Bootstrap(n.ctx) if err != nil { - log.ErrorE(n.ctx, "Problem bootstraping using DHT", err) + log.ErrorContextE(n.ctx, "Problem bootstraping using DHT", err) return } } @@ -254,7 +254,7 @@ func (n *Node) PeerInfo() peer.AddrInfo { func (n *Node) subscribeToPeerConnectionEvents() { sub, err := n.host.EventBus().Subscribe(new(event.EvtPeerConnectednessChanged)) if err != nil { - log.Info( + log.InfoContext( n.ctx, fmt.Sprintf("failed to subscribe to peer connectedness changed event: %v", err), ) @@ -276,7 +276,7 @@ func (n *Node) subscribeToPeerConnectionEvents() { func (n *Node) subscribeToPubSubEvents() { sub, err := n.host.EventBus().Subscribe(new(EvtPubSub)) if err != nil { - log.Info( + log.InfoContext( n.ctx, fmt.Sprintf("failed to subscribe to pubsub event: %v", err), ) @@ -298,7 +298,7 @@ func (n *Node) subscribeToPubSubEvents() { func (n *Node) subscribeToPushLogEvents() { sub, err := n.host.EventBus().Subscribe(new(EvtReceivedPushLog)) if err != nil { - log.Info( + log.InfoContext( n.ctx, 
fmt.Sprintf("failed to subscribe to push log event: %v", err), ) diff --git a/net/node_test.go b/net/node_test.go index 3b7f28d017..bf0bc653c5 100644 --- a/net/node_test.go +++ b/net/node_test.go @@ -31,13 +31,11 @@ import ( func FixtureNewMemoryDBWithBroadcaster(t *testing.T) client.DB { var database client.DB - var options []db.Option ctx := context.Background() - options = append(options, db.WithUpdateEvents()) opts := badgerds.Options{Options: badger.DefaultOptions("").WithInMemory(true)} rootstore, err := badgerds.NewDatastore("", &opts) require.NoError(t, err) - database, err = db.NewDB(ctx, rootstore, options...) + database, err = db.NewDB(ctx, rootstore, db.WithUpdateEvents()) require.NoError(t, err) return database } diff --git a/net/peer.go b/net/peer.go index 0c456d5b18..61711b3918 100644 --- a/net/peer.go +++ b/net/peer.go @@ -31,6 +31,7 @@ import ( "github.com/libp2p/go-libp2p/core/peer" peerstore "github.com/libp2p/go-libp2p/core/peerstore" "github.com/libp2p/go-libp2p/core/routing" + "github.com/sourcenetwork/corelog" "google.golang.org/grpc" "github.com/sourcenetwork/defradb/client" @@ -39,7 +40,6 @@ import ( "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/events" - "github.com/sourcenetwork/defradb/logging" "github.com/sourcenetwork/defradb/merkle/clock" pb "github.com/sourcenetwork/defradb/net/pb" ) @@ -146,11 +146,11 @@ func (p *Peer) Start() error { addr := p.host.Peerstore().PeerInfo(id) err := p.host.Connect(p.ctx, addr) if err != nil { - log.Info( + log.InfoContext( p.ctx, "Failure while reconnecting to a known peer", - logging.NewKV("peer", id), - logging.NewKV("error", err), + corelog.Any("peer", id), + corelog.Any("error", err), ) } }(id) @@ -173,17 +173,20 @@ func (p *Peer) Start() error { } p.updateChannel = updateChannel - log.Info(p.ctx, "Starting internal broadcaster for pubsub network") + log.InfoContext(p.ctx, "Starting internal broadcaster for pubsub network") go p.handleBroadcastLoop() } - log.FeedbackInfo(p.ctx, "Starting P2P node", logging.NewKV("P2P addresses", p.host.Addrs())) + log.InfoContext( + p.ctx, + "Starting P2P node", + corelog.Any("P2P addresses", p.host.Addrs())) // register the P2P gRPC server go func() { pb.RegisterServiceServer(p.p2pRPC, p.server) if err := p.p2pRPC.Serve(p2plistener); err != nil && !errors.Is(err, grpc.ErrServerStopped) { - log.FatalE(p.ctx, "Fatal P2P RPC server error", err) + log.ErrorContextE(p.ctx, "Fatal P2P RPC server error", err) } }() @@ -197,13 +200,13 @@ func (p *Peer) Start() error { func (p *Peer) Close() { // close topics if err := p.server.removeAllPubsubTopics(); err != nil { - log.ErrorE(p.ctx, "Error closing pubsub topics", err) + log.ErrorContextE(p.ctx, "Error closing pubsub topics", err) } // stop gRPC server for _, c := range p.server.conns { if err := c.Close(); err != nil { - log.ErrorE(p.ctx, "Failed closing server RPC connections", err) + log.ErrorContextE(p.ctx, "Failed closing server RPC connections", err) } } stopGRPCServer(p.ctx, p.p2pRPC) @@ -212,12 +215,12 @@ func (p *Peer) Close() { // close event emitters if p.server.pubSubEmitter != nil { if err := p.server.pubSubEmitter.Close(); err != nil { - log.Info(p.ctx, "Could not close pubsub event emitter", logging.NewKV("Error", err.Error())) + log.InfoContext(p.ctx, "Could not close pubsub event emitter", corelog.Any("Error", err.Error())) } } if p.server.pushLogEmitter != nil { if err := p.server.pushLogEmitter.Close(); err != nil { - log.Info(p.ctx, "Could not close 
push log event emitter", logging.NewKV("Error", err.Error())) + log.InfoContext(p.ctx, "Could not close push log event emitter", corelog.Any("Error", err.Error())) } } @@ -226,11 +229,11 @@ func (p *Peer) Close() { } if err := p.bserv.Close(); err != nil { - log.ErrorE(p.ctx, "Error closing block service", err) + log.ErrorContextE(p.ctx, "Error closing block service", err) } if err := p.host.Close(); err != nil { - log.ErrorE(p.ctx, "Error closing host", err) + log.ErrorContextE(p.ctx, "Error closing host", err) } p.cancel() @@ -239,9 +242,7 @@ func (p *Peer) Close() { // handleBroadcast loop manages the transition of messages // from the internal broadcaster to the external pubsub network func (p *Peer) handleBroadcastLoop() { - log.Debug(p.ctx, "Waiting for messages on internal broadcaster") for { - log.Debug(p.ctx, "Handling internal broadcast bus message") update, isOpen := <-p.updateChannel if !isOpen { return @@ -255,11 +256,11 @@ func (p *Peer) handleBroadcastLoop() { } else if update.Priority > 1 { err = p.handleDocUpdateLog(update) } else { - log.Info(p.ctx, "Skipping log with invalid priority of 0", logging.NewKV("CID", update.Cid)) + log.InfoContext(p.ctx, "Skipping log with invalid priority of 0", corelog.Any("CID", update.Cid)) } if err != nil { - log.ErrorE(p.ctx, "Error while handling broadcast log", err) + log.ErrorContextE(p.ctx, "Error while handling broadcast log", err) } } } @@ -272,19 +273,13 @@ func (p *Peer) RegisterNewDocument( nd ipld.Node, schemaRoot string, ) error { - log.Debug( - p.ctx, - "Registering a new document for our peer node", - logging.NewKV("DocID", docID.String()), - ) - // register topic if err := p.server.addPubSubTopic(docID.String(), !p.server.hasPubSubTopic(schemaRoot)); err != nil { - log.ErrorE( + log.ErrorContextE( p.ctx, "Failed to create new pubsub topic", err, - logging.NewKV("DocID", docID.String()), + corelog.String("DocID", docID.String()), ) return err } @@ -315,7 +310,7 @@ func (p *Peer) pushToReplicator( ) { for docIDResult := range docIDsCh { if docIDResult.Err != nil { - log.ErrorE(ctx, "Key channel error", docIDResult.Err) + log.ErrorContextE(ctx, "Key channel error", docIDResult.Err) continue } docID := core.DataStoreKeyFromDocID(docIDResult.ID) @@ -325,30 +320,30 @@ func (p *Peer) pushToReplicator( ) cids, priority, err := headset.List(ctx) if err != nil { - log.ErrorE( + log.ErrorContextE( ctx, "Failed to get heads", err, - logging.NewKV("DocID", docIDResult.ID.String()), - logging.NewKV("PeerID", pid), - logging.NewKV("Collection", collection.Name())) + corelog.String("DocID", docIDResult.ID.String()), + corelog.Any("PeerID", pid), + corelog.Any("Collection", collection.Name())) continue } // loop over heads, get block, make the required logs, and send for _, c := range cids { blk, err := txn.DAGstore().Get(ctx, c) if err != nil { - log.ErrorE(ctx, "Failed to get block", err, - logging.NewKV("CID", c), - logging.NewKV("PeerID", pid), - logging.NewKV("Collection", collection.Name())) + log.ErrorContextE(ctx, "Failed to get block", err, + corelog.Any("CID", c), + corelog.Any("PeerID", pid), + corelog.Any("Collection", collection.Name())) continue } // @todo: remove encode/decode loop for core.Log data nd, err := dag.DecodeProtobuf(blk.RawData()) if err != nil { - log.ErrorE(ctx, "Failed to decode protobuf", err, logging.NewKV("CID", c)) + log.ErrorContextE(ctx, "Failed to decode protobuf", err, corelog.Any("CID", c)) continue } @@ -360,12 +355,12 @@ func (p *Peer) pushToReplicator( Priority: priority, } if err := 
p.server.pushLog(ctx, evt, pid); err != nil { - log.ErrorE( + log.ErrorContextE( ctx, "Failed to replicate log", err, - logging.NewKV("CID", c), - logging.NewKV("PeerID", pid), + corelog.Any("CID", c), + corelog.Any("PeerID", pid), ) } } @@ -397,7 +392,7 @@ func (p *Peer) loadReplicators(ctx context.Context) error { // This will be used during connection and stream creation by libp2p. p.host.Peerstore().AddAddrs(rep.Info.ID, rep.Info.Addrs, peerstore.PermanentAddrTTL) - log.Info(ctx, "loaded replicators from datastore", logging.NewKV("Replicator", rep)) + log.InfoContext(ctx, "loaded replicators from datastore", corelog.Any("Replicator", rep)) } return nil @@ -433,7 +428,7 @@ func (p *Peer) handleDocCreateLog(evt events.Update) error { return err } // push to each peer (replicator) - p.pushLogToReplicators(p.ctx, evt) + p.pushLogToReplicators(evt) return nil } @@ -443,12 +438,6 @@ func (p *Peer) handleDocUpdateLog(evt events.Update) error { if err != nil { return NewErrFailedToGetDocID(err) } - log.Debug( - p.ctx, - "Preparing pubsub pushLog request from broadcast", - logging.NewKV("DocID", docID), - logging.NewKV("CID", evt.Cid), - logging.NewKV("SchemaRoot", evt.SchemaRoot)) body := &pb.PushLogRequest_Body{ DocID: []byte(docID.String()), @@ -464,7 +453,7 @@ func (p *Peer) handleDocUpdateLog(evt events.Update) error { } // push to each peer (replicator) - p.pushLogToReplicators(p.ctx, evt) + p.pushLogToReplicators(evt) if err := p.server.publishLog(p.ctx, evt.DocID, req); err != nil { return NewErrPublishingToDocIDTopic(err, evt.Cid.String(), evt.DocID) @@ -477,7 +466,7 @@ func (p *Peer) handleDocUpdateLog(evt events.Update) error { return nil } -func (p *Peer) pushLogToReplicators(ctx context.Context, lg events.Update) { +func (p *Peer) pushLogToReplicators(lg events.Update) { // push to each peer (replicator) peers := make(map[string]struct{}) for _, peer := range p.ps.ListPeers(lg.DocID) { @@ -500,13 +489,13 @@ func (p *Peer) pushLogToReplicators(ctx context.Context, lg events.Update) { } go func(peerID peer.ID) { if err := p.server.pushLog(p.ctx, lg, peerID); err != nil { - log.ErrorE( + log.ErrorContextE( p.ctx, "Failed pushing log", err, - logging.NewKV("DocID", lg.DocID), - logging.NewKV("CID", lg.Cid), - logging.NewKV("PeerID", peerID)) + corelog.String("DocID", lg.DocID), + corelog.Any("CID", lg.Cid), + corelog.Any("PeerID", peerID)) } }(pid) } @@ -532,7 +521,7 @@ func (p *Peer) newDAGSyncerTxn(txn datastore.Txn) ipld.DAGService { func (p *Peer) Session(ctx context.Context) ipld.NodeGetter { ng := dag.NewSession(ctx, p.DAGService) if ng == p.DAGService { - log.Info(ctx, "DAGService does not support sessions") + log.InfoContext(ctx, "DAGService does not support sessions") } return ng } @@ -547,7 +536,7 @@ func stopGRPCServer(ctx context.Context, server *grpc.Server) { select { case <-timer.C: server.Stop() - log.Info(ctx, "Peer gRPC server was shutdown ungracefully") + log.InfoContext(ctx, "Peer gRPC server was shutdown ungracefully") case <-stopped: timer.Stop() } diff --git a/net/peer_collection.go b/net/peer_collection.go index 6f4f4d8ba8..8bf7ee337f 100644 --- a/net/peer_collection.go +++ b/net/peer_collection.go @@ -18,22 +18,27 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" + "github.com/sourcenetwork/defradb/db" ) const marker = byte(0xff) func (p *Peer) AddP2PCollections(ctx context.Context, collectionIDs []string) error { - txn, err := p.db.NewTxn(p.ctx, false) + txn, err := p.db.NewTxn(ctx, false) if err != nil { return err 
} - defer txn.Discard(p.ctx) + defer txn.Discard(ctx) + + // TODO-ACP: Support ACP <> P2P - https://github.com/sourcenetwork/defradb/issues/2366 + // ctx = db.SetContextIdentity(ctx, identity) + ctx = db.SetContextTxn(ctx, txn) // first let's make sure the collections actually exists storeCollections := []client.Collection{} for _, col := range collectionIDs { - storeCol, err := p.db.WithTxn(txn).GetCollections( - p.ctx, + storeCol, err := p.db.GetCollections( + ctx, client.CollectionFetchOptions{ SchemaRoot: immutable.Some(col), }, @@ -47,6 +52,14 @@ func (p *Peer) AddP2PCollections(ctx context.Context, collectionIDs []string) er storeCollections = append(storeCollections, storeCol...) } + // Ensure none of the collections have a policy on them, until following is implemented: + // TODO-ACP: ACP <> P2P https://github.com/sourcenetwork/defradb/issues/2366 + for _, col := range storeCollections { + if col.Description().Policy.HasValue() { + return ErrP2PColHasPolicy + } + } + // Ensure we can add all the collections to the store on the transaction // before adding to topics. for _, col := range storeCollections { @@ -71,7 +84,7 @@ func (p *Peer) AddP2PCollections(ctx context.Context, collectionIDs []string) er // from the pubsub topics to avoid receiving duplicate events. removedTopics := []string{} for _, col := range storeCollections { - keyChan, err := col.GetAllDocIDs(p.ctx) + keyChan, err := col.GetAllDocIDs(ctx) if err != nil { return err } @@ -84,7 +97,7 @@ func (p *Peer) AddP2PCollections(ctx context.Context, collectionIDs []string) er } } - if err = txn.Commit(p.ctx); err != nil { + if err = txn.Commit(ctx); err != nil { err = p.rollbackRemovePubSubTopics(removedTopics, err) return p.rollbackAddPubSubTopics(addedTopics, err) } @@ -93,17 +106,21 @@ func (p *Peer) AddP2PCollections(ctx context.Context, collectionIDs []string) er } func (p *Peer) RemoveP2PCollections(ctx context.Context, collectionIDs []string) error { - txn, err := p.db.NewTxn(p.ctx, false) + txn, err := p.db.NewTxn(ctx, false) if err != nil { return err } - defer txn.Discard(p.ctx) + defer txn.Discard(ctx) + + // TODO-ACP: Support ACP <> P2P - https://github.com/sourcenetwork/defradb/issues/2366 + // ctx = db.SetContextIdentity(ctx, identity) + ctx = db.SetContextTxn(ctx, txn) // first let's make sure the collections actually exists storeCollections := []client.Collection{} for _, col := range collectionIDs { - storeCol, err := p.db.WithTxn(txn).GetCollections( - p.ctx, + storeCol, err := p.db.GetCollections( + ctx, client.CollectionFetchOptions{ SchemaRoot: immutable.Some(col), }, @@ -141,7 +158,7 @@ func (p *Peer) RemoveP2PCollections(ctx context.Context, collectionIDs []string) // to the pubsub topics. 
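A note on the pattern above: both `AddP2PCollections` and `RemoveP2PCollections` drop the `WithTxn(txn)` store wrapper in favour of carrying the transaction on the request context via `db.SetContextTxn`, with the caller-supplied `ctx` replacing the long-lived `p.ctx`. A minimal sketch of that lifecycle, assembled only from calls that appear in these hunks (the wrapper function name is illustrative, not part of the change):

```go
// Illustrative sketch of the context-scoped transaction lifecycle used above.
func p2pTxnSketch(ctx context.Context, p *Peer) error {
	txn, err := p.db.NewTxn(ctx, false)
	if err != nil {
		return err
	}
	// Deferred Discard mirrors the hunks above: it releases the transaction
	// on any early return before Commit.
	defer txn.Discard(ctx)

	// Every store call below now picks the transaction up from the context.
	ctx = db.SetContextTxn(ctx, txn)

	if _, err := p.db.GetCollections(ctx, client.CollectionFetchOptions{}); err != nil {
		return err
	}
	return txn.Commit(ctx)
}
```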
addedTopics := []string{} for _, col := range storeCollections { - keyChan, err := col.GetAllDocIDs(p.ctx) + keyChan, err := col.GetAllDocIDs(ctx) if err != nil { return err } @@ -154,7 +171,7 @@ func (p *Peer) RemoveP2PCollections(ctx context.Context, collectionIDs []string) } } - if err = txn.Commit(p.ctx); err != nil { + if err = txn.Commit(ctx); err != nil { err = p.rollbackAddPubSubTopics(addedTopics, err) return p.rollbackRemovePubSubTopics(removedTopics, err) } diff --git a/net/peer_replicator.go b/net/peer_replicator.go index 3638122a2a..ce5f7e23b6 100644 --- a/net/peer_replicator.go +++ b/net/peer_replicator.go @@ -20,6 +20,7 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" + "github.com/sourcenetwork/defradb/db" ) func (p *Peer) SetReplicator(ctx context.Context, rep client.Replicator) error { @@ -39,24 +40,43 @@ func (p *Peer) SetReplicator(ctx context.Context, rep client.Replicator) error { return err } + // TODO-ACP: Support ACP <> P2P - https://github.com/sourcenetwork/defradb/issues/2366 + // ctx = db.SetContextIdentity(ctx, identity) + ctx = db.SetContextTxn(ctx, txn) + var collections []client.Collection switch { case len(rep.Schemas) > 0: // if specific collections are chosen get them by name for _, name := range rep.Schemas { - col, err := p.db.WithTxn(txn).GetCollectionByName(ctx, name) + col, err := p.db.GetCollectionByName(ctx, name) if err != nil { return NewErrReplicatorCollections(err) } + + if col.Description().Policy.HasValue() { + return ErrReplicatorColHasPolicy + } + collections = append(collections, col) } default: - // default to all collections - collections, err = p.db.WithTxn(txn).GetCollections(ctx, client.CollectionFetchOptions{}) + // default to all collections (unless a collection contains a policy). + // TODO-ACP: default to all collections after resolving https://github.com/sourcenetwork/defradb/issues/2366 + allCollections, err := p.db.GetCollections(ctx, client.CollectionFetchOptions{}) if err != nil { return NewErrReplicatorCollections(err) } + + for _, col := range allCollections { + // Can not default to all collections if any collection has a policy. 
+ // TODO-ACP: remove this check/loop after https://github.com/sourcenetwork/defradb/issues/2366 + if col.Description().Policy.HasValue() { + return ErrReplicatorSomeColsHavePolicy + } + } + collections = allCollections } rep.Schemas = nil @@ -92,7 +112,7 @@ func (p *Peer) SetReplicator(ctx context.Context, rep client.Replicator) error { // push all collection documents to the replicator peer for _, col := range added { - keysCh, err := col.WithTxn(txn).GetAllDocIDs(ctx) + keysCh, err := col.GetAllDocIDs(ctx) if err != nil { return NewErrReplicatorDocID(err, col.Name().Value(), rep.Info.ID) } @@ -119,12 +139,15 @@ func (p *Peer) DeleteReplicator(ctx context.Context, rep client.Replicator) erro return err } + // set transaction for all operations + ctx = db.SetContextTxn(ctx, txn) + var collections []client.Collection switch { case len(rep.Schemas) > 0: // if specific collections are chosen get them by name for _, name := range rep.Schemas { - col, err := p.db.WithTxn(txn).GetCollectionByName(ctx, name) + col, err := p.db.GetCollectionByName(ctx, name) if err != nil { return NewErrReplicatorCollections(err) } @@ -139,7 +162,7 @@ func (p *Peer) DeleteReplicator(ctx context.Context, rep client.Replicator) erro default: // default to all collections - collections, err = p.db.WithTxn(txn).GetCollections(ctx, client.CollectionFetchOptions{}) + collections, err = p.db.GetCollections(ctx, client.CollectionFetchOptions{}) if err != nil { return NewErrReplicatorCollections(err) } diff --git a/net/peer_test.go b/net/peer_test.go index 0a863b8112..2ad5db9037 100644 --- a/net/peer_test.go +++ b/net/peer_test.go @@ -12,6 +12,7 @@ package net import ( "context" + "fmt" "testing" "time" @@ -25,6 +26,7 @@ import ( rpc "github.com/sourcenetwork/go-libp2p-pubsub-rpc" "github.com/stretchr/testify/require" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core/crdt" "github.com/sourcenetwork/defradb/datastore/memory" @@ -115,7 +117,7 @@ const randomMultiaddr = "/ip4/127.0.0.1/tcp/0" func newTestNode(ctx context.Context, t *testing.T) (client.DB, *Node) { store := memory.NewDatastore(ctx) - db, err := db.NewDB(ctx, store, db.WithUpdateEvents()) + db, err := db.NewDB(ctx, store, db.WithUpdateEvents(), db.WithACPInMemory()) require.NoError(t, err) n, err := NewNode( @@ -166,7 +168,7 @@ func TestNewPeer_WithExistingTopic_TopicAlreadyExistsError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) err = col.Create(ctx, doc) @@ -331,7 +333,7 @@ func TestRegisterNewDocument_NoError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) cid, err := createCID(doc) @@ -355,7 +357,7 @@ func TestRegisterNewDocument_RPCTopicAlreadyRegisteredError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) _, err = rpc.NewTopic(ctx, 
n.Peer.ps, n.Peer.host.ID(), doc.ID().String(), true) @@ -389,6 +391,107 @@ func TestSetReplicator_NoError(t *testing.T) { require.NoError(t, err) } +// This test documents that we don't allow setting replicator with a collection that has a policy +// until the following is implemented: +// TODO-ACP: ACP <> P2P https://github.com/sourcenetwork/defradb/issues/2366 +func TestSetReplicatorWithACollectionSpecifiedThatHasPolicy_ReturnError(t *testing.T) { + ctx := context.Background() + d, n := newTestNode(ctx, t) + defer n.Close() + + policy := ` + description: a policy + actor: + name: actor + resources: + user: + permissions: + read: + expr: owner + write: + expr: owner + relations: + owner: + types: + - actor + ` + ctx = db.SetContextIdentity(ctx, acpIdentity.New("cosmos1zzg43wdrhmmk89z3pmejwete2kkd4a3vn7w969")) + policyResult, err := d.AddPolicy(ctx, policy) + policyID := policyResult.PolicyID + require.NoError(t, err) + require.Equal(t, "fc3a0a39c73949c70a79e02b8d928028e9cbcc772ba801463a6acdcf2f256cd4", policyID) + + schema := fmt.Sprintf(` + type User @policy(id: "%s", resource: "user") { + name: String + age: Int + } + `, policyID, + ) + _, err = d.AddSchema(ctx, schema) + require.NoError(t, err) + + info, err := peer.AddrInfoFromString("/ip4/0.0.0.0/tcp/0/p2p/QmYyQSo1c1Ym7orWxLYvCrM2EmxFTANf8wXmmE7DWjhx5N") + require.NoError(t, err) + + err = n.Peer.SetReplicator(ctx, client.Replicator{ + Info: *info, + Schemas: []string{"User"}, + }) + require.Error(t, err) + require.ErrorIs(t, err, ErrReplicatorColHasPolicy) +} + +// This test documents that we don't allow setting replicator using default option when any collection has a policy +// until the following is implemented: +// TODO-ACP: ACP <> P2P https://github.com/sourcenetwork/defradb/issues/2366 +func TestSetReplicatorWithSomeCollectionThatHasPolicyUsingAllCollectionsByDefault_ReturnError(t *testing.T) { + ctx := context.Background() + d, n := newTestNode(ctx, t) + defer n.Close() + + policy := ` + description: a policy + actor: + name: actor + resources: + user: + permissions: + read: + expr: owner + write: + expr: owner + relations: + owner: + types: + - actor + ` + ctx = db.SetContextIdentity(ctx, acpIdentity.New("cosmos1zzg43wdrhmmk89z3pmejwete2kkd4a3vn7w969")) + policyResult, err := d.AddPolicy(ctx, policy) + policyID := policyResult.PolicyID + require.NoError(t, err) + require.Equal(t, "fc3a0a39c73949c70a79e02b8d928028e9cbcc772ba801463a6acdcf2f256cd4", policyID) + + schema := fmt.Sprintf(` + type User @policy(id: "%s", resource: "user") { + name: String + age: Int + } + `, policyID, + ) + _, err = d.AddSchema(ctx, schema) + require.NoError(t, err) + + info, err := peer.AddrInfoFromString("/ip4/0.0.0.0/tcp/0/p2p/QmYyQSo1c1Ym7orWxLYvCrM2EmxFTANf8wXmmE7DWjhx5N") + require.NoError(t, err) + + err = n.Peer.SetReplicator(ctx, client.Replicator{ + Info: *info, + // Note: The missing explicit input of schemas here + }) + require.ErrorIs(t, err, ErrReplicatorSomeColsHavePolicy) +} + func TestSetReplicator_WithInvalidAddress_EmptyPeerIDError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) @@ -472,7 +575,7 @@ func TestPushToReplicator_SingleDocumentNoPeer_FailedToReplicateLogError(t *test col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) err = col.Create(ctx, doc) @@ -698,6 +801,54 @@ func 
TestAddP2PCollections_WithInvalidCollectionID_NotFoundError(t *testing.T) { require.Error(t, err, ds.ErrNotFound) } +// This test documents that we don't allow adding p2p collections that have a policy +// until the following is implemented: +// TODO-ACP: ACP <> P2P https://github.com/sourcenetwork/defradb/issues/2366 +func TestAddP2PCollectionsWithPermissionedCollection_Error(t *testing.T) { + ctx := context.Background() + d, n := newTestNode(ctx, t) + defer n.Close() + + policy := ` + description: a policy + actor: + name: actor + resources: + user: + permissions: + read: + expr: owner + write: + expr: owner + relations: + owner: + types: + - actor + ` + ctx = db.SetContextIdentity(ctx, acpIdentity.New("cosmos1zzg43wdrhmmk89z3pmejwete2kkd4a3vn7w969")) + policyResult, err := d.AddPolicy(ctx, policy) + policyID := policyResult.PolicyID + require.NoError(t, err) + require.Equal(t, "fc3a0a39c73949c70a79e02b8d928028e9cbcc772ba801463a6acdcf2f256cd4", policyID) + + schema := fmt.Sprintf(` + type User @policy(id: "%s", resource: "user") { + name: String + age: Int + } + `, policyID, + ) + _, err = d.AddSchema(ctx, schema) + require.NoError(t, err) + + col, err := d.GetCollectionByName(ctx, "User") + require.NoError(t, err) + + err = n.Peer.AddP2PCollections(ctx, []string{col.SchemaRoot()}) + require.Error(t, err) + require.ErrorIs(t, err, ErrP2PColHasPolicy) +} + func TestAddP2PCollections_NoError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) @@ -789,7 +940,7 @@ func TestHandleDocCreateLog_NoError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) err = col.Create(ctx, doc) @@ -842,7 +993,7 @@ func TestHandleDocCreateLog_WithExistingTopic_TopicExistsError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) err = col.Create(ctx, doc) @@ -872,7 +1023,7 @@ func TestHandleDocUpdateLog_NoError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) err = col.Create(ctx, doc) @@ -925,7 +1076,7 @@ func TestHandleDocUpdateLog_WithExistingDocIDTopic_TopicExistsError(t *testing.T col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) err = col.Create(ctx, doc) @@ -969,7 +1120,7 @@ func TestHandleDocUpdateLog_WithExistingSchemaTopic_TopicExistsError(t *testing. 
col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) err = col.Create(ctx, doc) diff --git a/net/process.go b/net/process.go index 5eec8a6efd..6779ada29f 100644 --- a/net/process.go +++ b/net/process.go @@ -22,13 +22,13 @@ import ( blocks "github.com/ipfs/go-block-format" "github.com/ipfs/go-cid" ipld "github.com/ipfs/go-ipld-format" + "github.com/sourcenetwork/corelog" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/db/base" "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/logging" merklecrdt "github.com/sourcenetwork/defradb/merkle/crdt" ) @@ -65,12 +65,12 @@ func (bp *blockProcessor) mergeBlocks(ctx context.Context) { nd := e.Value.(ipld.Node) err := bp.processBlock(ctx, nd, "") if err != nil { - log.ErrorE( + log.ErrorContextE( ctx, "Failed to process block", err, - logging.NewKV("DocID", bp.dsKey.DocID), - logging.NewKV("CID", nd.Cid()), + corelog.String("DocID", bp.dsKey.DocID), + corelog.Any("CID", nd.Cid()), ) } } @@ -78,7 +78,7 @@ func (bp *blockProcessor) mergeBlocks(ctx context.Context) { // processBlock merges the block and its children to the datastore and sets the head accordingly. func (bp *blockProcessor) processBlock(ctx context.Context, nd ipld.Node, field string) error { - crdt, err := initCRDTForType(ctx, bp.txn, bp.col, bp.dsKey, field) + crdt, err := initCRDTForType(bp.txn, bp.col, bp.dsKey, field) if err != nil { return err } @@ -107,12 +107,12 @@ func (bp *blockProcessor) processBlock(ctx context.Context, nd ipld.Node, field } if err := bp.processBlock(ctx, nd, link.Name); err != nil { - log.ErrorE( + log.ErrorContextE( ctx, "Failed to process block", err, - logging.NewKV("DocID", bp.dsKey.DocID), - logging.NewKV("CID", nd.Cid()), + corelog.String("DocID", bp.dsKey.DocID), + corelog.Any("CID", nd.Cid()), ) } } @@ -121,7 +121,6 @@ func (bp *blockProcessor) processBlock(ctx context.Context, nd ipld.Node, field } func initCRDTForType( - ctx context.Context, txn datastore.Txn, col client.Collection, dsKey core.DataStoreKey, @@ -131,7 +130,6 @@ func initCRDTForType( var ctype client.CType description := col.Description() if field == "" { // empty field name implies composite type - ctype = client.COMPOSITE key = base.MakeDataStoreKeyWithCollectionDescription( description, ).WithInstanceInfo( @@ -140,7 +138,6 @@ func initCRDTForType( core.COMPOSITE_NAMESPACE, ) - log.Debug(ctx, "Got CRDT Type", logging.NewKV("CType", ctype), logging.NewKV("Field", field)) return merklecrdt.NewMerkleCompositeDAG( txn, core.NewCollectionSchemaVersionKey(col.Schema().VersionID, col.ID()), @@ -157,7 +154,6 @@ func initCRDTForType( fieldID := fd.ID.String() key = base.MakeDataStoreKeyWithCollectionDescription(description).WithInstanceInfo(dsKey).WithFieldId(fieldID) - log.Debug(ctx, "Got CRDT Type", logging.NewKV("CType", ctype), logging.NewKV("Field", field)) return merklecrdt.InstanceWithStore( txn, core.NewCollectionSchemaVersionKey(col.Schema().VersionID, col.ID()), @@ -183,8 +179,6 @@ func (bp *blockProcessor) processRemoteBlock( nd ipld.Node, isComposite bool, ) error { - log.Debug(ctx, "Running processLog") - if err := bp.txn.DAGstore().Put(ctx, nd); err != nil { return err } @@ -218,15 +212,14 @@ func (bp *blockProcessor) 
handleChildBlocks( exist, err := bp.txn.DAGstore().Has(ctx, link.Cid) if err != nil { - log.Error( + log.ErrorContext( ctx, "Failed to check for existing block", - logging.NewKV("CID", link.Cid), - logging.NewKV("ERROR", err), + corelog.Any("CID", link.Cid), + corelog.Any("ERROR", err), ) } if exist { - log.Debug(ctx, "Already have block locally, skipping.", logging.NewKV("CID", link.Cid)) continue } diff --git a/net/server.go b/net/server.go index 206ccb3b53..ebf772a8bc 100644 --- a/net/server.go +++ b/net/server.go @@ -21,6 +21,7 @@ import ( format "github.com/ipfs/go-ipld-format" "github.com/libp2p/go-libp2p/core/event" libpeer "github.com/libp2p/go-libp2p/core/peer" + "github.com/sourcenetwork/corelog" rpc "github.com/sourcenetwork/go-libp2p-pubsub-rpc" "github.com/sourcenetwork/immutable" "google.golang.org/grpc" @@ -31,8 +32,8 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore/badger/v4" + "github.com/sourcenetwork/defradb/db" "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/logging" pb "github.com/sourcenetwork/defradb/net/pb" ) @@ -96,7 +97,6 @@ func newServer(p *Peer, db client.DB, opts ...grpc.DialOption) (*server, error) } // Get all DocIDs across all collections in the DB - log.Debug(p.ctx, "Getting all existing DocIDs...") cols, err := s.db.GetCollections(s.peer.ctx, client.CollectionFetchOptions{}) if err != nil { return nil, err @@ -108,34 +108,29 @@ func newServer(p *Peer, db client.DB, opts ...grpc.DialOption) (*server, error) if _, ok := colMap[col.SchemaRoot()]; ok { continue } + // TODO-ACP: Support ACP <> P2P - https://github.com/sourcenetwork/defradb/issues/2366 docIDChan, err := col.GetAllDocIDs(p.ctx) if err != nil { return nil, err } for docID := range docIDChan { - log.Debug( - p.ctx, - "Registering existing DocID pubsub topic", - logging.NewKV("DocID", docID.ID.String()), - ) if err := s.addPubSubTopic(docID.ID.String(), true); err != nil { return nil, err } i++ } } - log.Debug(p.ctx, "Finished registering all DocID pubsub topics", logging.NewKV("Count", i)) } var err error s.pubSubEmitter, err = s.peer.host.EventBus().Emitter(new(EvtPubSub)) if err != nil { - log.Info(s.peer.ctx, "could not create event emitter", logging.NewKV("Error", err.Error())) + log.InfoContext(s.peer.ctx, "could not create event emitter", corelog.String("Error", err.Error())) } s.pushLogEmitter, err = s.peer.host.EventBus().Emitter(new(EvtReceivedPushLog)) if err != nil { - log.Info(s.peer.ctx, "could not create event emitter", logging.NewKV("Error", err.Error())) + log.InfoContext(s.peer.ctx, "could not create event emitter", corelog.String("Error", err.Error())) } return s, nil @@ -200,8 +195,6 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushL if err != nil { return nil, err } - log.Debug(ctx, "Received a PushLog request", logging.NewKV("PeerID", pid)) - cid, err := cid.Cast(req.Body.Cid) if err != nil { return nil, err @@ -217,7 +210,7 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushL if s.pushLogEmitter != nil { byPeer, err := libpeer.Decode(req.Body.Creator) if err != nil { - log.Info(ctx, "could not decode the PeerID of the log creator", logging.NewKV("Error", err.Error())) + log.InfoContext(ctx, "could not decode the PeerID of the log creator", corelog.String("Error", err.Error())) } err = s.pushLogEmitter.Emit(EvtReceivedPushLog{ FromPeer: pid, @@ -226,7 +219,7 @@ func (s *server) PushLog(ctx 
context.Context, req *pb.PushLogRequest) (*pb.PushL if err != nil { // logging instead of returning an error because the event bus should // not break the PushLog execution. - log.Info(ctx, "could not emit push log event", logging.NewKV("Error", err.Error())) + log.InfoContext(ctx, "could not emit push log event", corelog.String("Error", err.Error())) } } }() @@ -243,7 +236,6 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushL return nil, errors.Wrap(fmt.Sprintf("failed to check for existing block %s", cid), err) } if exists { - log.Debug(ctx, fmt.Sprintf("Already have block %s locally, skipping.", cid)) return &pb.PushLogReply{}, nil } @@ -258,11 +250,13 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushL return nil, err } defer txn.Discard(ctx) - store := s.db.WithTxn(txn) + + // use a transaction for all operations + ctx = db.SetContextTxn(ctx, txn) // Currently a schema is the best way we have to link a push log request to a collection, // this will change with https://github.com/sourcenetwork/defradb/issues/1085 - col, err := s.getActiveCollection(ctx, store, string(req.Body.SchemaRoot)) + col, err := s.getActiveCollection(ctx, s.db, string(req.Body.SchemaRoot)) if err != nil { return nil, err } @@ -270,7 +264,6 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushL // Create a new DAG service with the current transaction var getter format.NodeGetter = s.peer.newDAGSyncerTxn(txn) if sessionMaker, ok := getter.(SessionDAGSyncer); ok { - log.Debug(ctx, "Upgrading DAGSyncer with a session") getter = sessionMaker.Session(ctx) } @@ -280,22 +273,22 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushL return nil, errors.Wrap("failed to decode block to ipld.Node", err) } - var session sync.WaitGroup + var wg sync.WaitGroup bp := newBlockProcessor(s.peer, txn, col, dsKey, getter) - err = bp.processRemoteBlock(ctx, &session, nd, true) + err = bp.processRemoteBlock(ctx, &wg, nd, true) if err != nil { - log.ErrorE( + log.ErrorContextE( ctx, "Failed to process remote block", err, - logging.NewKV("DocID", dsKey.DocID), - logging.NewKV("CID", cid), + corelog.String("DocID", dsKey.DocID), + corelog.Any("CID", cid), ) } - session.Wait() + wg.Wait() bp.mergeBlocks(ctx) - err = s.syncIndexedDocs(ctx, col.WithTxn(txn), docID) + err = s.syncIndexedDocs(ctx, col, docID) if err != nil { return nil, err } @@ -359,25 +352,27 @@ func (s *server) syncIndexedDocs( col client.Collection, docID client.DocID, ) error { - preTxnCol, err := s.db.GetCollectionByName(ctx, col.Name().Value()) - if err != nil { - return err - } + // remove transaction from old context + oldCtx := db.SetContextTxn(ctx, nil) - oldDoc, err := preTxnCol.Get(ctx, docID, false) - isNewDoc := errors.Is(err, client.ErrDocumentNotFound) + //TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2365 + // Resolve while handling acp <> secondary indexes. + oldDoc, err := col.Get(oldCtx, docID, false) + isNewDoc := errors.Is(err, client.ErrDocumentNotFoundOrNotAuthorized) if !isNewDoc && err != nil { return err } + //TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2365 + // Resolve while handling acp <> secondary indexes. 
doc, err := col.Get(ctx, docID, false) - isDeletedDoc := errors.Is(err, client.ErrDocumentNotFound) + isDeletedDoc := errors.Is(err, client.ErrDocumentNotFoundOrNotAuthorized) if !isDeletedDoc && err != nil { return err } if isDeletedDoc { - return preTxnCol.DeleteDocIndex(ctx, oldDoc) + return col.DeleteDocIndex(oldCtx, oldDoc) } else if isNewDoc { return col.CreateDocIndex(ctx, doc) } else { @@ -490,32 +485,14 @@ func (s *server) publishLog(ctx context.Context, topic string, req *pb.PushLogRe if _, err := t.Publish(ctx, data, rpc.WithIgnoreResponse(true)); err != nil { return errors.Wrap(fmt.Sprintf("failed publishing to thread %s", topic), err) } - - cid, err := cid.Cast(req.Body.Cid) - if err != nil { - return err - } - - log.Debug( - ctx, - "Published log", - logging.NewKV("CID", cid), - logging.NewKV("DocID", topic), - ) return nil } // pubSubMessageHandler handles incoming PushLog messages from the pubsub network. func (s *server) pubSubMessageHandler(from libpeer.ID, topic string, msg []byte) ([]byte, error) { - log.Debug( - s.peer.ctx, - "Handling new pubsub message", - logging.NewKV("SenderID", from), - logging.NewKV("Topic", topic), - ) req := new(pb.PushLogRequest) if err := proto.Unmarshal(msg, req); err != nil { - log.ErrorE(s.peer.ctx, "Failed to unmarshal pubsub message %s", err) + log.ErrorContextE(s.peer.ctx, "Failed to unmarshal pubsub message %s", err) return nil, err } @@ -523,7 +500,6 @@ func (s *server) pubSubMessageHandler(from libpeer.ID, topic string, msg []byte) Addr: addr{from}, }) if _, err := s.PushLog(ctx, req); err != nil { - log.ErrorE(ctx, "Failed pushing log for doc", err, logging.NewKV("Topic", topic)) return nil, errors.Wrap(fmt.Sprintf("Failed pushing log for doc %s", topic), err) } return nil, nil @@ -531,12 +507,12 @@ func (s *server) pubSubMessageHandler(from libpeer.ID, topic string, msg []byte) // pubSubEventHandler logs events from the subscribed DocID topics. 
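The `syncIndexedDocs` hunk above uses the inverse of that context-transaction pattern: passing `nil` to `db.SetContextTxn` strips the transaction from a derived context, so the same `col.Get` call can observe the committed document alongside the in-flight one when deciding whether to create, update, or delete index entries. A hedged sketch of the double read (the helper name and return shape are illustrative):

```go
// Sketch: fetch both index-relevant views of a document, as in syncIndexedDocs.
// oldCtx sees committed state; ctx sees the current transaction's state.
func docViews(ctx context.Context, col client.Collection, docID client.DocID) (old, current *client.Document, err error) {
	oldCtx := db.SetContextTxn(ctx, nil) // nil clears the context transaction

	old, err = col.Get(oldCtx, docID, false)
	if err != nil && !errors.Is(err, client.ErrDocumentNotFoundOrNotAuthorized) {
		return nil, nil, err // absence here only means the document is new
	}
	current, err = col.Get(ctx, docID, false)
	if err != nil && !errors.Is(err, client.ErrDocumentNotFoundOrNotAuthorized) {
		return nil, nil, err // absence here means the document was deleted in-txn
	}
	return old, current, nil
}
```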
func (s *server) pubSubEventHandler(from libpeer.ID, topic string, msg []byte) { - log.Info( + log.InfoContext( s.peer.ctx, "Received new pubsub event", - logging.NewKV("SenderId", from), - logging.NewKV("Topic", topic), - logging.NewKV("Message", string(msg)), + corelog.Any("SenderId", from), + corelog.String("Topic", topic), + corelog.String("Message", string(msg)), ) if s.pubSubEmitter != nil { @@ -544,7 +520,7 @@ func (s *server) pubSubEventHandler(from libpeer.ID, topic string, msg []byte) { Peer: from, }) if err != nil { - log.Info(s.peer.ctx, "could not emit pubsub event", logging.NewKV("Error", err.Error())) + log.InfoContext(s.peer.ctx, "could not emit pubsub event", corelog.Any("Error", err.Error())) } } } diff --git a/net/server_test.go b/net/server_test.go index 099f426887..916e234109 100644 --- a/net/server_test.go +++ b/net/server_test.go @@ -98,7 +98,9 @@ type mockCollection struct { func (mCol *mockCollection) SchemaRoot() string { return "mockColID" } -func (mCol *mockCollection) GetAllDocIDs(ctx context.Context) (<-chan client.DocIDResult, error) { +func (mCol *mockCollection) GetAllDocIDs( + ctx context.Context, +) (<-chan client.DocIDResult, error) { return nil, mockError } @@ -131,7 +133,7 @@ func TestNewServerWithAddTopicError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) err = col.Create(ctx, doc) @@ -177,7 +179,7 @@ func TestNewServerWithEmitterError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) err = col.Create(ctx, doc) @@ -260,7 +262,7 @@ func TestPushLog(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) cid, err := createCID(doc) diff --git a/node/node.go b/node/node.go index 89bedd56ff..9524247bf8 100644 --- a/node/node.go +++ b/node/node.go @@ -17,15 +17,15 @@ import ( gohttp "net/http" "github.com/libp2p/go-libp2p/core/peer" + "github.com/sourcenetwork/corelog" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/db" "github.com/sourcenetwork/defradb/http" - "github.com/sourcenetwork/defradb/logging" "github.com/sourcenetwork/defradb/net" ) -var log = logging.MustNewLogger("node") +var log = corelog.NewLogger("node") // Options contains start configuration values. 
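The `node.go` hunk above is representative of the logging migration running through this whole diff: `logging.MustNewLogger` becomes `corelog.NewLogger`, level methods move to context-aware forms (`InfoContext`, `ErrorContext`, `ErrorContextE`), `logging.NewKV` pairs become typed `corelog.String`/`corelog.Any` attributes, and the `FeedbackInfo`/`FeedbackErrorE`/`FatalE` variants are downgraded to their plain counterparts. A small before/after sketch, using only calls this diff itself exercises:

```go
package main

import (
	"context"
	"errors"

	"github.com/sourcenetwork/corelog"
)

var log = corelog.NewLogger("node")

func main() {
	ctx := context.Background()
	err := errors.New("example failure")

	// Before: log.Info(ctx, "Received new pubsub event", logging.NewKV("Topic", t))
	log.InfoContext(ctx, "Received new pubsub event",
		corelog.String("Topic", "some-doc-id"),
		corelog.Any("Error", err),
	)

	// Before: log.ErrorE(ctx, "Error closing host", err)
	log.ErrorContextE(ctx, "Error closing host", err)
}
```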
type Options struct { @@ -166,10 +166,10 @@ func (n *Node) Start(ctx context.Context) error { if err != nil { return err } - log.FeedbackInfo(ctx, fmt.Sprintf("Providing HTTP API at %s.", n.Server.Address())) + log.InfoContext(ctx, fmt.Sprintf("Providing HTTP API at %s.", n.Server.Address())) go func() { if err := n.Server.Serve(); err != nil && !errors.Is(err, gohttp.ErrServerClosed) { - log.FeedbackErrorE(ctx, "HTTP server stopped", err) + log.ErrorContextE(ctx, "HTTP server stopped", err) } }() } diff --git a/planner/create.go b/planner/create.go index a03c429da9..addf8e4d97 100644 --- a/planner/create.go +++ b/planner/create.go @@ -57,7 +57,7 @@ func (n *createNode) Kind() string { return "createNode" } func (n *createNode) Init() error { return nil } func (n *createNode) Start() error { - doc, err := client.NewDocFromMap(n.input, n.collection.Schema()) + doc, err := client.NewDocFromMap(n.input, n.collection.Definition()) if err != nil { n.err = err return err } @@ -78,7 +78,10 @@ func (n *createNode) Next() (bool, error) { return false, nil } - if err := n.collection.WithTxn(n.p.txn).Create(n.p.ctx, n.doc); err != nil { + if err := n.collection.Create( + n.p.ctx, + n.doc, + ); err != nil { return false, err } diff --git a/planner/delete.go b/planner/delete.go index 63cdec9a6f..71313e9f5d 100644 --- a/planner/delete.go +++ b/planner/delete.go @@ -53,7 +53,10 @@ func (n *deleteNode) Next() (bool, error) { if err != nil { return false, err } - _, err = n.collection.DeleteWithDocID(n.p.ctx, docID) + _, err = n.collection.Delete( + n.p.ctx, + docID, + ) if err != nil { return false, err } @@ -136,7 +139,7 @@ func (p *Planner) DeleteDocs(parsed *mapper.Mutation) (planNode, error) { p: p, filter: parsed.Filter, docIDs: parsed.DocIDs.Value(), - collection: col.WithTxn(p.txn), + collection: col, source: slctNode, docMapper: docMapper{parsed.DocumentMapping}, }, nil diff --git a/planner/filter/complex.go b/planner/filter/complex.go index acc2de4883..ce72ead07e 100644 --- a/planner/filter/complex.go +++ b/planner/filter/complex.go @@ -17,7 +17,7 @@ import ( // IsComplex returns true if the provided filter is complex. // A filter is considered complex if it contains a relation -// object withing an _or or _not operator not necessarily being +// object within an _or, _and or _not operator not necessarily being // its direct child.
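The functional change in `IsComplex` just below is twofold: `_and` joins the compound-operator set, and the old single-branch exemption for `_or` (the `len(v.([]any)) > 1` guard) disappears, so even a one-branch `_or` wrapping a relation now counts as complex. The flipped expectations in `complex_test.go` further down capture exactly that; an excerpt-style sketch (the entry name is illustrative, and `m`/`r` are the helpers those tests already define):

```go
// Mirrors the updated complex_test.go expectations: a relation ("published")
// under a compound operator is complex even with a single branch.
{
	name: "relation inside single-branch _or",
	inputFilter: r("_or",
		m("published", m("rating", m("_gt", 4.0))),
	),
	isComplex: true, // was false before this change
},
```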
func IsComplex(filter *mapper.Filter) bool { if filter == nil { @@ -31,8 +31,8 @@ func isComplex(conditions any, seekRelation bool) bool { case map[connor.FilterKey]any: for k, v := range typedCond { if op, ok := k.(*mapper.Operator); ok { - if (op.Operation == request.FilterOpOr && len(v.([]any)) > 1) || - op.Operation == request.FilterOpNot { + switch op.Operation { + case request.FilterOpOr, request.FilterOpAnd, request.FilterOpNot: if isComplex(v, true) { return true } diff --git a/planner/filter/complex_test.go b/planner/filter/complex_test.go index f16055df74..208860501f 100644 --- a/planner/filter/complex_test.go +++ b/planner/filter/complex_test.go @@ -80,7 +80,7 @@ func TestIsComplex(t *testing.T) { inputFilter: r("_or", m("published", m("rating", m("_gt", 4.0))), ), - isComplex: false, + isComplex: true, }, { name: "relation inside _or", @@ -109,7 +109,7 @@ func TestIsComplex(t *testing.T) { m("published", m("rating", m("_gt", 4.0))), ), ), - isComplex: false, + isComplex: true, }, { name: "relation inside _and and _or", diff --git a/planner/mapper/mapper.go b/planner/mapper/mapper.go index 953d21ce17..af3542c403 100644 --- a/planner/mapper/mapper.go +++ b/planner/mapper/mapper.go @@ -33,13 +33,26 @@ var ( FilterEqOp = &Operator{Operation: "_eq"} ) +// SelectionType is the type of selection. +type SelectionType int + +const ( + ObjectSelection SelectionType = iota + CommitSelection +) + // ToSelect converts the given [parser.Select] into a [Select]. // // In the process of doing so it will construct the document map required to access the data // yielded by the [Select]. -func ToSelect(ctx context.Context, store client.Store, selectRequest *request.Select) (*Select, error) { +func ToSelect( + ctx context.Context, + store client.Store, + rootSelectType SelectionType, + selectRequest *request.Select, +) (*Select, error) { // the top-level select will always have index=0, and no parent collection name - return toSelect(ctx, store, 0, selectRequest, "") + return toSelect(ctx, store, rootSelectType, 0, selectRequest, "") } // toSelect converts the given [parser.Select] into a [Select]. @@ -49,28 +62,35 @@ func ToSelect(ctx context.Context, store client.Store, selectRequest *request.Se func toSelect( ctx context.Context, store client.Store, + rootSelectType SelectionType, thisIndex int, selectRequest *request.Select, parentCollectionName string, ) (*Select, error) { - collectionName, err := getCollectionName(ctx, store, selectRequest, parentCollectionName) + if rootSelectType == ObjectSelection && selectRequest.Name == request.VersionFieldName { + // WARNING: This is a weird quirk upon which some of the mapper code is dependent upon + // please remove it if/when you have chance to. 
+ rootSelectType = CommitSelection + } + + collectionName, err := getCollectionName(ctx, store, rootSelectType, selectRequest, parentCollectionName) if err != nil { return nil, err } - mapping, definition, err := getTopLevelInfo(ctx, store, selectRequest, collectionName) + mapping, definition, err := getTopLevelInfo(ctx, store, rootSelectType, selectRequest, collectionName) if err != nil { return nil, err } - fields, aggregates, err := getRequestables(ctx, selectRequest, mapping, collectionName, store) + fields, aggregates, err := getRequestables(ctx, rootSelectType, selectRequest, mapping, collectionName, store) if err != nil { return nil, err } // Needs to be done before resolving aggregates, else filter conversion may fail there filterDependencies, err := resolveFilterDependencies( - ctx, store, collectionName, selectRequest.Filter, mapping, fields) + ctx, store, rootSelectType, collectionName, selectRequest.Filter, mapping, fields) if err != nil { return nil, err } @@ -78,7 +98,7 @@ func toSelect( // Resolve order dependencies that may have been missed due to not being rendered. err = resolveOrderDependencies( - ctx, store, collectionName, selectRequest.OrderBy, mapping, &fields) + ctx, store, rootSelectType, collectionName, selectRequest.OrderBy, mapping, &fields) if err != nil { return nil, err } @@ -86,7 +106,7 @@ func toSelect( aggregates = appendUnderlyingAggregates(aggregates, mapping) fields, err = resolveAggregates( ctx, - selectRequest, + rootSelectType, aggregates, fields, mapping, @@ -100,7 +120,15 @@ func toSelect( } if len(definition.Schema.Fields) != 0 { - fields, err = resolveSecondaryRelationIDs(ctx, store, collectionName, definition.Schema, mapping, fields) + fields, err = resolveSecondaryRelationIDs( + ctx, + store, + rootSelectType, + collectionName, + definition, + mapping, + fields, + ) if err != nil { return nil, err } @@ -146,6 +174,7 @@ func toSelect( func resolveOrderDependencies( ctx context.Context, store client.Store, + rootSelectType SelectionType, descName string, source immutable.Option[request.OrderBy], mapping *core.DocumentMapping, @@ -170,7 +199,15 @@ outer: joinField := fields[0] // ensure the child select is resolved for this order join - innerSelect, err := resolveChildOrder(ctx, store, descName, joinField, mapping, currentExistingFields) + innerSelect, err := resolveChildOrder( + ctx, + store, + rootSelectType, + descName, + joinField, + mapping, + currentExistingFields, + ) if err != nil { return err } @@ -188,7 +225,7 @@ outer: joinField := fields[0] // ensure the child select is resolved for this order join - innerSelect, err := resolveChildOrder(ctx, store, descName, joinField, mapping, existingFields) + innerSelect, err := resolveChildOrder(ctx, store, rootSelectType, descName, joinField, mapping, existingFields) if err != nil { return err } @@ -215,6 +252,7 @@ outer: func resolveChildOrder( ctx context.Context, store client.Store, + rootSelectType SelectionType, descName string, orderChildField string, mapping *core.DocumentMapping, @@ -232,7 +270,7 @@ func resolveChildOrder( Name: orderChildField, }, } - innerSelect, err := toSelect(ctx, store, index, &dummyJoinFieldSelect, descName) + innerSelect, err := toSelect(ctx, store, rootSelectType, index, &dummyJoinFieldSelect, descName) if err != nil { return nil, err } @@ -262,7 +300,7 @@ func resolveChildOrder( // updated with any new fields/aggregates. 
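Most of the signature churn in this mapper file serves a single refactor: the selection root is no longer read off `request.Select.Root` but passed explicitly as the new `SelectionType` (`ObjectSelection` or `CommitSelection`) through every helper, `resolveAggregates` below included. Callers choose the root at the entry points, as the later `ToCommitSelect`/`ToMutation` hunks show; a minimal usage sketch (wrapper name illustrative):

```go
// Sketch of the new entry-point shape: object queries pass ObjectSelection,
// while ToCommitSelect routes the same path with CommitSelection.
func mapObjectQuery(ctx context.Context, store client.Store, req *request.Select) (*Select, error) {
	return ToSelect(ctx, store, ObjectSelection, req)
}
```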
func resolveAggregates( ctx context.Context, - selectRequest *request.Select, + rootSelectType SelectionType, aggregates []*aggregateRequest, inputFields []Requestable, mapping *core.DocumentMapping, @@ -334,7 +372,6 @@ func resolveAggregates( index := mapping.GetNextIndex() hostSelectRequest := &request.Select{ - Root: selectRequest.Root, Field: request.Field{ Name: target.hostExternalName, }, @@ -344,24 +381,31 @@ func resolveAggregates( collectionName = "" } - childCollectionName, err := getCollectionName(ctx, store, hostSelectRequest, collectionName) + childCollectionName, err := getCollectionName(ctx, store, rootSelectType, hostSelectRequest, collectionName) if err != nil { return nil, err } - mapAggregateNestedTargets(target, hostSelectRequest, selectRequest.Root) + mapAggregateNestedTargets(target, hostSelectRequest) - childMapping, _, err := getTopLevelInfo(ctx, store, hostSelectRequest, childCollectionName) + childMapping, _, err := getTopLevelInfo(ctx, store, rootSelectType, hostSelectRequest, childCollectionName) if err != nil { return nil, err } - childFields, _, err := getRequestables(ctx, hostSelectRequest, childMapping, childCollectionName, store) + childFields, _, err := getRequestables( + ctx, + rootSelectType, + hostSelectRequest, + childMapping, + childCollectionName, + store, + ) if err != nil { return nil, err } err = resolveOrderDependencies( - ctx, store, childCollectionName, target.order, childMapping, &childFields) + ctx, store, rootSelectType, childCollectionName, target.order, childMapping, &childFields) if err != nil { return nil, err } @@ -373,6 +417,7 @@ func resolveAggregates( filterDependencies, err := resolveFilterDependencies( ctx, store, + rootSelectType, childCollectionName, target.filter, mapping.ChildMappings[index], @@ -481,13 +526,11 @@ func resolveAggregates( func mapAggregateNestedTargets( target *aggregateRequestTarget, hostSelectRequest *request.Select, - selectionType request.SelectionType, ) { if target.order.HasValue() { for _, cond := range target.order.Value().Conditions { if len(cond.Fields) > 1 { hostSelectRequest.Fields = append(hostSelectRequest.Fields, &request.Select{ - Root: selectionType, Field: request.Field{ Name: cond.Fields[0], }, @@ -503,7 +546,6 @@ func mapAggregateNestedTargets( for _, innerCond := range cond { if _, isMap := innerCond.(map[string]any); isMap { hostSelectRequest.Fields = append(hostSelectRequest.Fields, &request.Select{ - Root: selectionType, Field: request.Field{ Name: topKey, }, @@ -619,6 +661,7 @@ func appendIfNotExists( // consumed mapping data. 
func getRequestables( ctx context.Context, + rootSelectType SelectionType, selectRequest *request.Select, mapping *core.DocumentMapping, collectionName string, @@ -644,7 +687,7 @@ func getRequestables( case *request.Select: index := mapping.GetNextIndex() - innerSelect, err := toSelect(ctx, store, index, f, collectionName) + innerSelect, err := toSelect(ctx, store, rootSelectType, index, f, collectionName) if err != nil { return nil, nil, err } @@ -710,6 +753,7 @@ func getAggregateRequests(index int, aggregate *request.Aggregate) (aggregateReq func getCollectionName( ctx context.Context, store client.Store, + rootSelectType SelectionType, selectRequest *request.Select, parentCollectionName string, ) (string, error) { @@ -719,7 +763,7 @@ func getCollectionName( if selectRequest.Name == request.GroupFieldName { return parentCollectionName, nil - } else if selectRequest.Root == request.CommitSelection { + } else if rootSelectType == CommitSelection { return parentCollectionName, nil } @@ -730,10 +774,10 @@ func getCollectionName( } hostFieldDesc, parentHasField := parentCollection.Definition().GetFieldByName(selectRequest.Name) - if parentHasField && hostFieldDesc.RelationName != "" { + if parentHasField && hostFieldDesc.Kind.IsObject() { // If this field exists on the parent, and it is a child object // then this collection name is the collection name of the child. - return hostFieldDesc.Schema, nil + return hostFieldDesc.Kind.Underlying(), nil } } @@ -744,6 +788,7 @@ func getCollectionName( func getTopLevelInfo( ctx context.Context, store client.Store, + rootSelectType SelectionType, selectRequest *request.Select, collectionName string, ) (*core.DocumentMapping, client.CollectionDefinition, error) { @@ -755,7 +800,7 @@ func getTopLevelInfo( return mapping, client.CollectionDefinition{}, nil } - if selectRequest.Root == request.ObjectSelection { + if rootSelectType == ObjectSelection { var definition client.CollectionDefinition collection, err := store.GetCollectionByName(ctx, collectionName) if err != nil { @@ -834,6 +879,7 @@ func getTopLevelInfo( func resolveFilterDependencies( ctx context.Context, store client.Store, + rootSelectType SelectionType, parentCollectionName string, source immutable.Option[request.Filter], mapping *core.DocumentMapping, @@ -846,6 +892,7 @@ func resolveFilterDependencies( return resolveInnerFilterDependencies( ctx, store, + rootSelectType, parentCollectionName, source.Value().Conditions, mapping, @@ -857,6 +904,7 @@ func resolveFilterDependencies( func resolveInnerFilterDependencies( ctx context.Context, store client.Store, + rootSelectType SelectionType, parentCollectionName string, source map[string]any, mapping *core.DocumentMapping, @@ -872,6 +920,7 @@ func resolveInnerFilterDependencies( innerFields, err := resolveInnerFilterDependencies( ctx, store, + rootSelectType, parentCollectionName, innerFilter.(map[string]any), mapping, @@ -891,6 +940,7 @@ func resolveInnerFilterDependencies( innerFields, err := resolveInnerFilterDependencies( ctx, store, + rootSelectType, parentCollectionName, notFilter, mapping, @@ -934,7 +984,7 @@ func resolveInnerFilterDependencies( } } else { var err error - childSelect, err = constructEmptyJoin(ctx, store, parentCollectionName, mapping, key) + childSelect, err = constructEmptyJoin(ctx, store, rootSelectType, parentCollectionName, mapping, key) if err != nil { return nil, err } @@ -951,7 +1001,7 @@ func resolveInnerFilterDependencies( } dummyParsed := &request.Select{Field: request.Field{Name: key}} - childCollectionName, err 
:= getCollectionName(ctx, store, dummyParsed, parentCollectionName) + childCollectionName, err := getCollectionName(ctx, store, rootSelectType, dummyParsed, parentCollectionName) if err != nil { return nil, err } @@ -959,6 +1009,7 @@ func resolveInnerFilterDependencies( childFields, err := resolveInnerFilterDependencies( ctx, store, + rootSelectType, childCollectionName, childFilter, childSelect.DocumentMapping, @@ -979,6 +1030,7 @@ func resolveInnerFilterDependencies( func constructEmptyJoin( ctx context.Context, store client.Store, + rootSelectType SelectionType, parentCollectionName string, parentMapping *core.DocumentMapping, name string, @@ -991,12 +1043,12 @@ func constructEmptyJoin( }, } - childCollectionName, err := getCollectionName(ctx, store, dummyParsed, parentCollectionName) + childCollectionName, err := getCollectionName(ctx, store, rootSelectType, dummyParsed, parentCollectionName) if err != nil { return nil, err } - childMapping, _, err := getTopLevelInfo(ctx, store, dummyParsed, childCollectionName) + childMapping, _, err := getTopLevelInfo(ctx, store, rootSelectType, dummyParsed, childCollectionName) if err != nil { return nil, err } @@ -1025,8 +1077,9 @@ func constructEmptyJoin( func resolveSecondaryRelationIDs( ctx context.Context, store client.Store, + rootSelectType SelectionType, collectionName string, - schema client.SchemaDescription, + schema client.CollectionDefinition, mapping *core.DocumentMapping, requestables []Requestable, ) ([]Requestable, error) { @@ -1064,6 +1117,7 @@ func resolveSecondaryRelationIDs( join, err := constructEmptyJoin( ctx, store, + rootSelectType, collectionName, mapping, objectFieldName, @@ -1088,7 +1142,7 @@ func ToCommitSelect( store client.Store, selectRequest *request.CommitSelect, ) (*CommitSelect, error) { - underlyingSelect, err := ToSelect(ctx, store, selectRequest.ToSelect()) + underlyingSelect, err := ToSelect(ctx, store, CommitSelection, selectRequest.ToSelect()) if err != nil { return nil, err } @@ -1097,7 +1151,7 @@ func ToCommitSelect( DocID: selectRequest.DocID, FieldID: selectRequest.FieldID, Depth: selectRequest.Depth, - Cid: selectRequest.Cid, + Cid: selectRequest.CID, }, nil } @@ -1106,7 +1160,7 @@ func ToCommitSelect( // In the process of doing so it will construct the document map required to access the data // yielded by the [Select] embedded in the [Mutation]. func ToMutation(ctx context.Context, store client.Store, mutationRequest *request.ObjectMutation) (*Mutation, error) { - underlyingSelect, err := ToSelect(ctx, store, mutationRequest.ToSelect()) + underlyingSelect, err := ToSelect(ctx, store, ObjectSelection, mutationRequest.ToSelect()) if err != nil { return nil, err } diff --git a/planner/planner.go b/planner/planner.go index 0629076924..2a181ce621 100644 --- a/planner/planner.go +++ b/planner/planner.go @@ -13,6 +13,10 @@ package planner import ( "context" + "github.com/sourcenetwork/immutable" + + "github.com/sourcenetwork/defradb/acp" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" "github.com/sourcenetwork/defradb/connor" @@ -82,17 +86,27 @@ type PlanContext struct { // Planner combines session state and database state to // produce a request plan, which is run by the execution context. 
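Just below, the `Planner` grows optional `identity` and `acp` handles that `planner/scan.go` forwards into the fetcher, so document-level access checks can run during fetch. A hedged construction sketch; the helper name is invented here, and the zero-valued `immutable.Option` arguments (no identity, ACP disabled) mirror the `immutable.Option[string]{}` usage elsewhere in this diff:

```go
// Sketch: build a planner with no requester identity and ACP disabled.
func newPublicPlanner(ctx context.Context, store client.Store, txn datastore.Txn) *Planner {
	return New(
		ctx,
		immutable.Option[acpIdentity.Identity]{}, // no identity supplied
		immutable.Option[acp.ACP]{},              // ACP not configured
		store,
		txn,
	)
}
```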
type Planner struct { - txn datastore.Txn - db client.Store + txn datastore.Txn + identity immutable.Option[acpIdentity.Identity] + acp immutable.Option[acp.ACP] + db client.Store ctx context.Context } -func New(ctx context.Context, db client.Store, txn datastore.Txn) *Planner { +func New( + ctx context.Context, + identity immutable.Option[acpIdentity.Identity], + acp immutable.Option[acp.ACP], + db client.Store, + txn datastore.Txn, +) *Planner { return &Planner{ - txn: txn, - db: db, - ctx: ctx, + txn: txn, + identity: identity, + acp: acp, + db: db, + ctx: ctx, } } @@ -114,7 +128,7 @@ func (p *Planner) newPlan(stmt any) (planNode, error) { return p.newPlan(n.Selections[0]) case *request.Select: - m, err := mapper.ToSelect(p.ctx, p.db, n) + m, err := mapper.ToSelect(p.ctx, p.db, mapper.ObjectSelection, n) if err != nil { return nil, err } @@ -347,7 +361,7 @@ func (p *Planner) tryOptimizeJoinDirection(node *invertibleTypeJoin, parentPlan desc := slct.collection.Description() for subFieldName, subFieldInd := range filteredSubFields { indexes := desc.GetIndexesOnField(subFieldName) - if len(indexes) > 0 { + if len(indexes) > 0 && !filter.IsComplex(parentPlan.selectNode.filter) { subInd := node.documentMapping.FirstIndexOfName(node.subTypeName) relatedField := mapper.Field{Name: node.subTypeName, Index: subInd} fieldFilter := filter.UnwrapRelation(filter.CopyField( diff --git a/planner/scan.go b/planner/scan.go index 3ba0dd03cb..e52b3c2131 100644 --- a/planner/scan.go +++ b/planner/scan.go @@ -64,7 +64,9 @@ func (n *scanNode) Init() error { // init the fetcher if err := n.fetcher.Init( n.p.ctx, + n.p.identity, n.p.txn, + n.p.acp, n.col, n.fields, n.filter, diff --git a/planner/type_join.go b/planner/type_join.go index 915a2d128f..f93a8fe7db 100644 --- a/planner/type_join.go +++ b/planner/type_join.go @@ -80,14 +80,14 @@ func (p *Planner) makeTypeIndexJoin( var joinPlan planNode var err error - typeFieldDesc, ok := parent.collection.Schema().GetFieldByName(subType.Name) + typeFieldDesc, ok := parent.collection.Definition().GetFieldByName(subType.Name) if !ok { return nil, client.NewErrFieldNotExist(subType.Name) } - if typeFieldDesc.Kind == client.FieldKind_FOREIGN_OBJECT { // One-to-One, or One side of One-to-Many + if typeFieldDesc.Kind.IsObject() && !typeFieldDesc.Kind.IsArray() { // One-to-One, or One side of One-to-Many joinPlan, err = p.makeTypeJoinOne(parent, source, subType) - } else if typeFieldDesc.Kind == client.FieldKind_FOREIGN_OBJECT_ARRAY { // Many side of One-to-Many + } else if typeFieldDesc.Kind.IsObjectArray() { // Many side of One-to-Many joinPlan, err = p.makeTypeJoinMany(parent, source, subType) } else { // more to come, Many-to-Many, Embedded? 
return nil, ErrUnknownRelationType @@ -239,7 +239,7 @@ func (p *Planner) makeTypeJoinOne( } // get the correct sub field schema type (collection) - subTypeFieldDesc, ok := parent.collection.Schema().GetFieldByName(subType.Name) + subTypeFieldDesc, ok := parent.collection.Definition().GetFieldByName(subType.Name) if !ok { return nil, client.NewErrFieldNotExist(subType.Name) } @@ -248,13 +248,11 @@ func (p *Planner) makeTypeJoinOne( if err != nil { return nil, err } - subTypeSchema := subTypeCol.Schema() subTypeField, subTypeFieldNameFound := subTypeCol.Description().GetFieldByRelation( subTypeFieldDesc.RelationName, parent.collection.Name().Value(), subTypeFieldDesc.Name, - &subTypeSchema, ) if !subTypeFieldNameFound { return nil, client.NewErrFieldNotExist(subTypeFieldDesc.RelationName) @@ -271,7 +269,7 @@ func (p *Planner) makeTypeJoinOne( dir := joinDirection{ firstNode: source, secondNode: selectPlan, - secondaryField: subTypeField.Name + request.RelatedObjectID, + secondaryField: immutable.Some(subTypeField.Name + request.RelatedObjectID), primaryField: subTypeFieldDesc.Name + request.RelatedObjectID, } @@ -281,7 +279,8 @@ func (p *Planner) makeTypeJoinOne( root: source, subType: selectPlan, subSelect: subType, - rootName: subTypeField.Name, + subSelectFieldDef: subTypeFieldDesc, + rootName: immutable.Some(subTypeField.Name), subTypeName: subType.Name, isSecondary: !subTypeFieldDesc.IsPrimaryRelation, secondaryFieldIndex: secondaryFieldIndex, @@ -347,7 +346,7 @@ func prepareScanNodeFilterForTypeJoin( parent.filter.Conditions = filter.Merge( parent.filter.Conditions, scan.filter.Conditions) } - filter.RemoveField(scan.filter, subType.Field) + scan.filter = nil } else { var parentFilter *mapper.Filter scan.filter, parentFilter = filter.SplitByFields(scan.filter, subType.Field) @@ -374,7 +373,7 @@ func (p *Planner) makeTypeJoinMany( return nil, err } - subTypeFieldDesc, ok := parent.collection.Schema().GetFieldByName(subType.Name) + subTypeFieldDesc, ok := parent.collection.Definition().GetFieldByName(subType.Name) if !ok { return nil, client.NewErrFieldNotExist(subType.Name) } @@ -383,23 +382,25 @@ func (p *Planner) makeTypeJoinMany( if err != nil { return nil, err } - subTypeSchema := subTypeCol.Schema() - rootField, rootNameFound := subTypeCol.Description().GetFieldByRelation( - subTypeFieldDesc.RelationName, - parent.collection.Name().Value(), - subTypeFieldDesc.Name, - &subTypeSchema, - ) - - if !rootNameFound { - return nil, client.NewErrFieldNotExist(subTypeFieldDesc.RelationName) + var secondaryFieldName immutable.Option[string] + var rootName immutable.Option[string] + if subTypeFieldDesc.RelationName != "" { + rootField, rootNameFound := subTypeCol.Description().GetFieldByRelation( + subTypeFieldDesc.RelationName, + parent.collection.Name().Value(), + subTypeFieldDesc.Name, + ) + if rootNameFound { + rootName = immutable.Some(rootField.Name) + secondaryFieldName = immutable.Some(rootField.Name + request.RelatedObjectID) + } } dir := joinDirection{ firstNode: source, secondNode: selectPlan, - secondaryField: rootField.Name + request.RelatedObjectID, + secondaryField: secondaryFieldName, primaryField: subTypeFieldDesc.Name + request.RelatedObjectID, } @@ -409,7 +410,8 @@ func (p *Planner) makeTypeJoinMany( root: source, subType: selectPlan, subSelect: subType, - rootName: rootField.Name, + subSelectFieldDef: subTypeFieldDesc, + rootName: rootName, isSecondary: true, subTypeName: subType.Name, secondaryFetchLimit: 0, @@ -457,32 +459,39 @@ func fetchPrimaryDoc(node, subNode 
@@ -457,32 +459,39 @@ func fetchPrimaryDoc(node, subNode planNode, parentProp string) (bool, error) {
 type joinDirection struct {
 	firstNode  planNode
 	secondNode planNode
-	secondaryField string
+	secondaryField immutable.Option[string]
 	primaryField   string
 	isInverted     bool
 }
 
 func (dir *joinDirection) invert() {
+	if !dir.secondaryField.HasValue() {
+		// If the secondary field has no value, it cannot be inverted.
+		return
+	}
 	dir.isInverted = !dir.isInverted
 	dir.firstNode, dir.secondNode = dir.secondNode, dir.firstNode
-	dir.secondaryField, dir.primaryField = dir.primaryField, dir.secondaryField
+	dir.secondaryField, dir.primaryField = immutable.Some(dir.primaryField), dir.secondaryField.Value()
 }
 
 type invertibleTypeJoin struct {
-	documentIterator
 	docMapper
 
 	root    planNode
 	subType planNode
-	rootName string
+	rootName immutable.Option[string]
 	subTypeName string
 
-	subSelect *mapper.Select
+	subSelect         *mapper.Select
+	subSelectFieldDef client.FieldDefinition
 
 	isSecondary         bool
 	secondaryFieldIndex immutable.Option[int]
 	secondaryFetchLimit uint
 
+	// docsToYield contains documents read and ready to be yielded by this node.
+	docsToYield []core.Doc
+
 	dir joinDirection
 }
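Because `secondaryField` is now optional, `invert` has to return early when there is no field to promote to the primary position; otherwise it swaps the driving node and exchanges the primary and secondary field names. A standalone sketch of the guarded swap, with the plan nodes reduced to strings so it runs on its own:

```go
package main

import (
	"fmt"

	"github.com/sourcenetwork/immutable"
)

// joinDirection is a reduced copy of the planner struct above, with the
// planNode fields replaced by plain strings for the sake of the sketch.
type joinDirection struct {
	firstNode      string
	secondNode     string
	secondaryField immutable.Option[string]
	primaryField   string
	isInverted     bool
}

func (dir *joinDirection) invert() {
	if !dir.secondaryField.HasValue() {
		// Without a named secondary field there is nothing to promote
		// to primary, so the direction must stay as it is.
		return
	}
	dir.isInverted = !dir.isInverted
	dir.firstNode, dir.secondNode = dir.secondNode, dir.firstNode
	dir.secondaryField, dir.primaryField = immutable.Some(dir.primaryField), dir.secondaryField.Value()
}

func main() {
	dir := joinDirection{
		firstNode:      "scan(Author)",
		secondNode:     "scan(Book)",
		secondaryField: immutable.Some("published_id"),
		primaryField:   "author_id",
	}
	dir.invert()
	fmt.Println(dir.firstNode, dir.primaryField, dir.secondaryField.Value())
	// scan(Book) published_id author_id
}
```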
@@ -552,6 +561,17 @@ func (join *invertibleTypeJoin) processSecondResult(secondDocs []core.Doc) (any,
 }
 
 func (join *invertibleTypeJoin) Next() (bool, error) {
+	if len(join.docsToYield) > 0 {
+		// If there are one or more documents in the queue, drop the first one -
+		// it will have been yielded by the last `Next()` call.
+		join.docsToYield = join.docsToYield[1:]
+		if len(join.docsToYield) > 0 {
+			// If there are still documents in the queue, return true, yielding
+			// the next one in the queue.
+			return true, nil
+		}
+	}
+
 	hasFirstValue, err := join.dir.firstNode.Next()
 
 	if err != nil || !hasFirstValue {
@@ -563,7 +583,9 @@ func (join *invertibleTypeJoin) Next() (bool, error) {
 	if join.isSecondary {
 		secondDocs, err := fetchDocsWithFieldValue(
 			join.dir.secondNode,
-			join.dir.secondaryField,
+			// As the join is from the secondary field, we know that [join.dir.secondaryField] must have a
+			// value, otherwise the user would not have been able to request it.
+			join.dir.secondaryField.Value(),
 			firstDoc.GetID(),
 			join.secondaryFetchLimit,
 		)
@@ -571,7 +593,14 @@
 			return false, err
 		}
 		if join.dir.secondNode == join.root {
-			join.root.Value().Fields[join.subSelect.Index] = join.subType.Value()
+			if len(secondDocs) == 0 {
+				return false, nil
+			}
+			for i := range secondDocs {
+				secondDocs[i].Fields[join.subSelect.Index] = join.subType.Value()
+			}
+			join.docsToYield = append(join.docsToYield, secondDocs...)
+			return true, nil
 		} else {
 			secondResult, secondIDResult := join.processSecondResult(secondDocs)
 			join.dir.firstNode.Value().Fields[join.subSelect.Index] = secondResult
@@ -590,17 +619,32 @@ func (join *invertibleTypeJoin) Next() (bool, error) {
 		}
 	}
 
-	join.currentValue = join.root.Value()
+	join.docsToYield = append(join.docsToYield, join.root.Value())
 
 	return true, nil
 }
 
+func (join *invertibleTypeJoin) Value() core.Doc {
+	if len(join.docsToYield) == 0 {
+		return core.Doc{}
+	}
+	return join.docsToYield[0]
+}
+
 func (join *invertibleTypeJoin) invertJoinDirectionWithIndex(
 	fieldFilter *mapper.Filter,
 	index client.IndexDescription,
 ) error {
+	if !join.rootName.HasValue() {
+		// If the root field has no value, it cannot be inverted.
+		return nil
+	}
+	if join.subSelectFieldDef.Kind.IsArray() {
+		// invertibleTypeJoin does not support inverting one-many relations at the moment.
+		return nil
+	}
 	subScan := getScanNode(join.subType)
-	subScan.tryAddField(join.rootName + request.RelatedObjectID)
+	subScan.tryAddField(join.rootName.Value() + request.RelatedObjectID)
 	subScan.filter = fieldFilter
 	subScan.initFetcher(immutable.Option[string]{}, immutable.Some(index))
diff --git a/planner/update.go b/planner/update.go
index 077ceb39e4..b93934c136 100644
--- a/planner/update.go
+++ b/planner/update.go
@@ -11,8 +11,6 @@
 package planner
 
 import (
-	"encoding/json"
-
 	"github.com/sourcenetwork/defradb/client"
 	"github.com/sourcenetwork/defradb/client/request"
 	"github.com/sourcenetwork/defradb/core"
@@ -64,15 +62,21 @@ func (n *updateNode) Next() (bool, error) {
 	}
 
 	n.currentValue = n.results.Value()
+
 	docID, err := client.NewDocIDFromString(n.currentValue.GetID())
 	if err != nil {
 		return false, err
 	}
-	patch, err := json.Marshal(n.input)
+	doc, err := n.collection.Get(n.p.ctx, docID, false)
 	if err != nil {
 		return false, err
 	}
-	_, err = n.collection.UpdateWithDocID(n.p.ctx, docID, string(patch))
+	for k, v := range n.input {
+		if err := doc.Set(k, v); err != nil {
+			return false, err
+		}
+	}
+	err = n.collection.Update(n.p.ctx, doc)
 	if err != nil {
 		return false, err
 	}
@@ -169,7 +173,7 @@ func (p *Planner) UpdateDocs(parsed *mapper.Mutation) (planNode, error) {
 	if err != nil {
 		return nil, err
 	}
-	update.collection = col.WithTxn(p.txn)
+	update.collection = col
 
 	// create the results Select node
 	resultsNode, err := p.Select(&parsed.Select)
diff --git a/planner/view.go b/planner/view.go
index f02de06d27..2bb5f94fa8 100644
--- a/planner/view.go
+++ b/planner/view.go
@@ -33,7 +33,7 @@ func (p *Planner) View(query *mapper.Select, col client.Collection) (planNode, e
 	querySource := (col.Description().Sources[0].(*client.QuerySource))
 	hasTransform := querySource.Transform.HasValue()
 
-	m, err := mapper.ToSelect(p.ctx, p.db, &querySource.Query)
+	m, err := mapper.ToSelect(p.ctx, p.db, mapper.ObjectSelection, &querySource.Query)
 	if err != nil {
 		return nil, err
 	}
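The queue introduced in `Next()` above is what lets the secondary side of a one-to-many join yield several parent documents from a single fetch: `Next()` fills `docsToYield`, later calls pop the head before fetching again, and `Value()` always returns the current head. The same pattern in isolation, with `core.Doc` reduced to a string and the underlying plan nodes replaced by a stub `fetch`:

```go
package main

import "fmt"

// batchIterator yields documents one at a time from batches produced by
// fetch(), mirroring the docsToYield queue in invertibleTypeJoin.
type batchIterator struct {
	batches     [][]string // stand-in for the joined plan; assumed non-empty batches
	docsToYield []string
}

func (it *batchIterator) Next() (bool, error) {
	if len(it.docsToYield) > 0 {
		// Drop the head: it was yielded by the previous Next() call.
		it.docsToYield = it.docsToYield[1:]
		if len(it.docsToYield) > 0 {
			return true, nil
		}
	}
	batch, ok := it.fetch()
	if !ok {
		return false, nil
	}
	it.docsToYield = append(it.docsToYield, batch...)
	return true, nil
}

// Value returns the current head of the queue, like the new
// invertibleTypeJoin.Value above.
func (it *batchIterator) Value() string {
	if len(it.docsToYield) == 0 {
		return ""
	}
	return it.docsToYield[0]
}

// fetch stands in for the underlying plan nodes; each call returns the
// next batch of joined documents, e.g. all books of one author.
func (it *batchIterator) fetch() ([]string, bool) {
	if len(it.batches) == 0 {
		return nil, false
	}
	batch := it.batches[0]
	it.batches = it.batches[1:]
	return batch, true
}

func main() {
	it := &batchIterator{batches: [][]string{{"book1", "book2"}, {"book3"}}}
	for {
		hasNext, _ := it.Next()
		if !hasNext {
			break
		}
		fmt.Println(it.Value()) // book1, book2, book3
	}
}
```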
"@typescript-eslint/eslint-plugin": "^7.1.0", - "@typescript-eslint/parser": "^7.1.0", + "@typescript-eslint/eslint-plugin": "^7.7.1", + "@typescript-eslint/parser": "^7.7.1", "@vitejs/plugin-react-swc": "^3.6.0", "eslint": "^8.57.0", - "eslint-plugin-react-hooks": "^4.6.0", - "eslint-plugin-react-refresh": "^0.4.5", - "typescript": "^5.3.3", - "vite": "^5.1.4" - } - }, - "node_modules/@aashutoshrathi/word-wrap": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz", - "integrity": "sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==", - "dev": true, - "engines": { - "node": ">=0.10.0" + "eslint-plugin-react-hooks": "^4.6.2", + "eslint-plugin-react-refresh": "^0.4.6", + "typescript": "^5.4.5", + "vite": "^5.2.10" } }, "node_modules/@babel/runtime": { - "version": "7.24.0", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.24.0.tgz", - "integrity": "sha512-Chk32uHMg6TnQdvw2e9IlqPpFX/6NLuK0Ys2PqLb7/gL5uFn9mXvK715FGLlOLQrcO4qIkNHkvPGktzzXexsFw==", + "version": "7.24.5", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.24.5.tgz", + "integrity": "sha512-Nms86NXrsaeU9vbBJKni6gXiEXZ4CVpYVzEjDH9Sb8vmZ3UljyA1GSOJl/6LGPO8EHLuSF9H+IxNXHPX8QHJ4g==", "dependencies": { "regenerator-runtime": "^0.14.0" }, @@ -49,9 +40,9 @@ } }, "node_modules/@babel/runtime-corejs3": { - "version": "7.24.0", - "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.24.0.tgz", - "integrity": "sha512-HxiRMOncx3ly6f3fcZ1GVKf+/EROcI9qwPgmij8Czqy6Okm/0T37T4y2ZIlLUuEUFjtM7NRsfdCO8Y3tAiJZew==", + "version": "7.24.5", + "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.24.5.tgz", + "integrity": "sha512-GWO0mgzNMLWaSYM4z4NVIuY0Cd1fl8cPnuetuddu5w/qGuvt5Y7oUi/kvvQGK9xgOkFJDQX2heIvTRn/OQ1XTg==", "dependencies": { "core-js-pure": "^3.30.2", "regenerator-runtime": "^0.14.0" @@ -61,9 +52,9 @@ } }, "node_modules/@braintree/sanitize-url": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@braintree/sanitize-url/-/sanitize-url-7.0.0.tgz", - "integrity": "sha512-GMu2OJiTd1HSe74bbJYQnVvELANpYiGFZELyyTM1CR0sdv5ReQAcJ/c/8pIrPab3lO11+D+EpuGLUxqz+y832g==" + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/@braintree/sanitize-url/-/sanitize-url-7.0.1.tgz", + "integrity": "sha512-URg8UM6lfC9ZYqFipItRSxYJdgpU5d2Z4KnjsJ+rj6tgAmGme7E+PQNCiud8g0HDaZKMovu2qjfa0f5Ge0Vlsg==" }, "node_modules/@codemirror/language": { "version": "6.0.0", @@ -86,9 +77,9 @@ "peer": true }, "node_modules/@codemirror/view": { - "version": "6.25.1", - "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.25.1.tgz", - "integrity": "sha512-2LXLxsQnHDdfGzDvjzAwZh2ZviNJm7im6tGpa0IONIDnFd8RZ80D2SNi8PDi6YjKcMoMRK20v6OmKIdsrwsyoQ==", + "version": "6.26.3", + "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.26.3.tgz", + "integrity": "sha512-gmqxkPALZjkgSxIeeweY/wGQXBfwTUaLs8h7OKtSwfbj9Ct3L11lD+u1sS7XHppxFQoMDiMDp07P9f3I2jWOHw==", "peer": true, "dependencies": { "@codemirror/state": "^6.4.0", @@ -112,9 +103,9 @@ "optional": true }, "node_modules/@esbuild/aix-ppc64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.19.12.tgz", - "integrity": "sha512-bmoCYyWdEL3wDQIVbcyzRyeKLgk2WtWLTWz1ZIAZF/EGbNOwSA6ew3PftJ1PqMiOOGu0OyFMzG53L0zqIpPeNA==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.20.2.tgz", + "integrity": 
"sha512-D+EBOJHXdNZcLJRBkhENNG8Wji2kgc9AZ9KiPr1JuZjsNtyHzrsfLRrY0tk2H2aoFu6RANO1y1iPPUCDYWkb5g==", "cpu": [ "ppc64" ], @@ -128,9 +119,9 @@ } }, "node_modules/@esbuild/android-arm": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.19.12.tgz", - "integrity": "sha512-qg/Lj1mu3CdQlDEEiWrlC4eaPZ1KztwGJ9B6J+/6G+/4ewxJg7gqj8eVYWvao1bXrqGiW2rsBZFSX3q2lcW05w==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.20.2.tgz", + "integrity": "sha512-t98Ra6pw2VaDhqNWO2Oph2LXbz/EJcnLmKLGBJwEwXX/JAN83Fym1rU8l0JUWK6HkIbWONCSSatf4sf2NBRx/w==", "cpu": [ "arm" ], @@ -144,9 +135,9 @@ } }, "node_modules/@esbuild/android-arm64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.19.12.tgz", - "integrity": "sha512-P0UVNGIienjZv3f5zq0DP3Nt2IE/3plFzuaS96vihvD0Hd6H/q4WXUGpCxD/E8YrSXfNyRPbpTq+T8ZQioSuPA==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.20.2.tgz", + "integrity": "sha512-mRzjLacRtl/tWU0SvD8lUEwb61yP9cqQo6noDZP/O8VkwafSYwZ4yWy24kan8jE/IMERpYncRt2dw438LP3Xmg==", "cpu": [ "arm64" ], @@ -160,9 +151,9 @@ } }, "node_modules/@esbuild/android-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.19.12.tgz", - "integrity": "sha512-3k7ZoUW6Q6YqhdhIaq/WZ7HwBpnFBlW905Fa4s4qWJyiNOgT1dOqDiVAQFwBH7gBRZr17gLrlFCRzF6jFh7Kew==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.20.2.tgz", + "integrity": "sha512-btzExgV+/lMGDDa194CcUQm53ncxzeBrWJcncOBxuC6ndBkKxnHdFJn86mCIgTELsooUmwUm9FkhSp5HYu00Rg==", "cpu": [ "x64" ], @@ -176,9 +167,9 @@ } }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.19.12.tgz", - "integrity": "sha512-B6IeSgZgtEzGC42jsI+YYu9Z3HKRxp8ZT3cqhvliEHovq8HSX2YX8lNocDn79gCKJXOSaEot9MVYky7AKjCs8g==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.20.2.tgz", + "integrity": "sha512-4J6IRT+10J3aJH3l1yzEg9y3wkTDgDk7TSDFX+wKFiWjqWp/iCfLIYzGyasx9l0SAFPT1HwSCR+0w/h1ES/MjA==", "cpu": [ "arm64" ], @@ -192,9 +183,9 @@ } }, "node_modules/@esbuild/darwin-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.19.12.tgz", - "integrity": "sha512-hKoVkKzFiToTgn+41qGhsUJXFlIjxI/jSYeZf3ugemDYZldIXIxhvwN6erJGlX4t5h417iFuheZ7l+YVn05N3A==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.20.2.tgz", + "integrity": "sha512-tBcXp9KNphnNH0dfhv8KYkZhjc+H3XBkF5DKtswJblV7KlT9EI2+jeA8DgBjp908WEuYll6pF+UStUCfEpdysA==", "cpu": [ "x64" ], @@ -208,9 +199,9 @@ } }, "node_modules/@esbuild/freebsd-arm64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.19.12.tgz", - "integrity": "sha512-4aRvFIXmwAcDBw9AueDQ2YnGmz5L6obe5kmPT8Vd+/+x/JMVKCgdcRwH6APrbpNXsPz+K653Qg8HB/oXvXVukA==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.20.2.tgz", + "integrity": "sha512-d3qI41G4SuLiCGCFGUrKsSeTXyWG6yem1KcGZVS+3FYlYhtNoNgYrWcvkOoaqMhwXSMrZRl69ArHsGJ9mYdbbw==", "cpu": [ "arm64" ], @@ -224,9 +215,9 @@ } }, "node_modules/@esbuild/freebsd-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.19.12.tgz", - 
"integrity": "sha512-EYoXZ4d8xtBoVN7CEwWY2IN4ho76xjYXqSXMNccFSx2lgqOG/1TBPW0yPx1bJZk94qu3tX0fycJeeQsKovA8gg==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.20.2.tgz", + "integrity": "sha512-d+DipyvHRuqEeM5zDivKV1KuXn9WeRX6vqSqIDgwIfPQtwMP4jaDsQsDncjTDDsExT4lR/91OLjRo8bmC1e+Cw==", "cpu": [ "x64" ], @@ -240,9 +231,9 @@ } }, "node_modules/@esbuild/linux-arm": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.19.12.tgz", - "integrity": "sha512-J5jPms//KhSNv+LO1S1TX1UWp1ucM6N6XuL6ITdKWElCu8wXP72l9MM0zDTzzeikVyqFE6U8YAV9/tFyj0ti+w==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.20.2.tgz", + "integrity": "sha512-VhLPeR8HTMPccbuWWcEUD1Az68TqaTYyj6nfE4QByZIQEQVWBB8vup8PpR7y1QHL3CpcF6xd5WVBU/+SBEvGTg==", "cpu": [ "arm" ], @@ -256,9 +247,9 @@ } }, "node_modules/@esbuild/linux-arm64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.19.12.tgz", - "integrity": "sha512-EoTjyYyLuVPfdPLsGVVVC8a0p1BFFvtpQDB/YLEhaXyf/5bczaGeN15QkR+O4S5LeJ92Tqotve7i1jn35qwvdA==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.20.2.tgz", + "integrity": "sha512-9pb6rBjGvTFNira2FLIWqDk/uaf42sSyLE8j1rnUpuzsODBq7FvpwHYZxQ/It/8b+QOS1RYfqgGFNLRI+qlq2A==", "cpu": [ "arm64" ], @@ -272,9 +263,9 @@ } }, "node_modules/@esbuild/linux-ia32": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.19.12.tgz", - "integrity": "sha512-Thsa42rrP1+UIGaWz47uydHSBOgTUnwBwNq59khgIwktK6x60Hivfbux9iNR0eHCHzOLjLMLfUMLCypBkZXMHA==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.20.2.tgz", + "integrity": "sha512-o10utieEkNPFDZFQm9CoP7Tvb33UutoJqg3qKf1PWVeeJhJw0Q347PxMvBgVVFgouYLGIhFYG0UGdBumROyiig==", "cpu": [ "ia32" ], @@ -288,9 +279,9 @@ } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.19.12.tgz", - "integrity": "sha512-LiXdXA0s3IqRRjm6rV6XaWATScKAXjI4R4LoDlvO7+yQqFdlr1Bax62sRwkVvRIrwXxvtYEHHI4dm50jAXkuAA==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.20.2.tgz", + "integrity": "sha512-PR7sp6R/UC4CFVomVINKJ80pMFlfDfMQMYynX7t1tNTeivQ6XdX5r2XovMmha/VjR1YN/HgHWsVcTRIMkymrgQ==", "cpu": [ "loong64" ], @@ -304,9 +295,9 @@ } }, "node_modules/@esbuild/linux-mips64el": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.19.12.tgz", - "integrity": "sha512-fEnAuj5VGTanfJ07ff0gOA6IPsvrVHLVb6Lyd1g2/ed67oU1eFzL0r9WL7ZzscD+/N6i3dWumGE1Un4f7Amf+w==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.20.2.tgz", + "integrity": "sha512-4BlTqeutE/KnOiTG5Y6Sb/Hw6hsBOZapOVF6njAESHInhlQAghVVZL1ZpIctBOoTFbQyGW+LsVYZ8lSSB3wkjA==", "cpu": [ "mips64el" ], @@ -320,9 +311,9 @@ } }, "node_modules/@esbuild/linux-ppc64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.19.12.tgz", - "integrity": "sha512-nYJA2/QPimDQOh1rKWedNOe3Gfc8PabU7HT3iXWtNUbRzXS9+vgB0Fjaqr//XNbd82mCxHzik2qotuI89cfixg==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.20.2.tgz", + "integrity": 
"sha512-rD3KsaDprDcfajSKdn25ooz5J5/fWBylaaXkuotBDGnMnDP1Uv5DLAN/45qfnf3JDYyJv/ytGHQaziHUdyzaAg==", "cpu": [ "ppc64" ], @@ -336,9 +327,9 @@ } }, "node_modules/@esbuild/linux-riscv64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.19.12.tgz", - "integrity": "sha512-2MueBrlPQCw5dVJJpQdUYgeqIzDQgw3QtiAHUC4RBz9FXPrskyyU3VI1hw7C0BSKB9OduwSJ79FTCqtGMWqJHg==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.20.2.tgz", + "integrity": "sha512-snwmBKacKmwTMmhLlz/3aH1Q9T8v45bKYGE3j26TsaOVtjIag4wLfWSiZykXzXuE1kbCE+zJRmwp+ZbIHinnVg==", "cpu": [ "riscv64" ], @@ -352,9 +343,9 @@ } }, "node_modules/@esbuild/linux-s390x": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.19.12.tgz", - "integrity": "sha512-+Pil1Nv3Umes4m3AZKqA2anfhJiVmNCYkPchwFJNEJN5QxmTs1uzyy4TvmDrCRNT2ApwSari7ZIgrPeUx4UZDg==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.20.2.tgz", + "integrity": "sha512-wcWISOobRWNm3cezm5HOZcYz1sKoHLd8VL1dl309DiixxVFoFe/o8HnwuIwn6sXre88Nwj+VwZUvJf4AFxkyrQ==", "cpu": [ "s390x" ], @@ -368,9 +359,9 @@ } }, "node_modules/@esbuild/linux-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.19.12.tgz", - "integrity": "sha512-B71g1QpxfwBvNrfyJdVDexenDIt1CiDN1TIXLbhOw0KhJzE78KIFGX6OJ9MrtC0oOqMWf+0xop4qEU8JrJTwCg==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.20.2.tgz", + "integrity": "sha512-1MdwI6OOTsfQfek8sLwgyjOXAu+wKhLEoaOLTjbijk6E2WONYpH9ZU2mNtR+lZ2B4uwr+usqGuVfFT9tMtGvGw==", "cpu": [ "x64" ], @@ -384,9 +375,9 @@ } }, "node_modules/@esbuild/netbsd-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.19.12.tgz", - "integrity": "sha512-3ltjQ7n1owJgFbuC61Oj++XhtzmymoCihNFgT84UAmJnxJfm4sYCiSLTXZtE00VWYpPMYc+ZQmB6xbSdVh0JWA==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.20.2.tgz", + "integrity": "sha512-K8/DhBxcVQkzYc43yJXDSyjlFeHQJBiowJ0uVL6Tor3jGQfSGHNNJcWxNbOI8v5k82prYqzPuwkzHt3J1T1iZQ==", "cpu": [ "x64" ], @@ -400,9 +391,9 @@ } }, "node_modules/@esbuild/openbsd-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.19.12.tgz", - "integrity": "sha512-RbrfTB9SWsr0kWmb9srfF+L933uMDdu9BIzdA7os2t0TXhCRjrQyCeOt6wVxr79CKD4c+p+YhCj31HBkYcXebw==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.20.2.tgz", + "integrity": "sha512-eMpKlV0SThJmmJgiVyN9jTPJ2VBPquf6Kt/nAoo6DgHAoN57K15ZghiHaMvqjCye/uU4X5u3YSMgVBI1h3vKrQ==", "cpu": [ "x64" ], @@ -416,9 +407,9 @@ } }, "node_modules/@esbuild/sunos-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.19.12.tgz", - "integrity": "sha512-HKjJwRrW8uWtCQnQOz9qcU3mUZhTUQvi56Q8DPTLLB+DawoiQdjsYq+j+D3s9I8VFtDr+F9CjgXKKC4ss89IeA==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.20.2.tgz", + "integrity": "sha512-2UyFtRC6cXLyejf/YEld4Hajo7UHILetzE1vsRcGL3earZEW77JxrFjH4Ez2qaTiEfMgAXxfAZCm1fvM/G/o8w==", "cpu": [ "x64" ], @@ -432,9 +423,9 @@ } }, "node_modules/@esbuild/win32-arm64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.19.12.tgz", - "integrity": 
"sha512-URgtR1dJnmGvX864pn1B2YUYNzjmXkuJOIqG2HdU62MVS4EHpU2946OZoTMnRUHklGtJdJZ33QfzdjGACXhn1A==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.20.2.tgz", + "integrity": "sha512-GRibxoawM9ZCnDxnP3usoUDO9vUkpAxIIZ6GQI+IlVmr5kP3zUq+l17xELTHMWTWzjxa2guPNyrpq1GWmPvcGQ==", "cpu": [ "arm64" ], @@ -448,9 +439,9 @@ } }, "node_modules/@esbuild/win32-ia32": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.19.12.tgz", - "integrity": "sha512-+ZOE6pUkMOJfmxmBZElNOx72NKpIa/HFOMGzu8fqzQJ5kgf6aTGrcJaFsNiVMH4JKpMipyK+7k0n2UXN7a8YKQ==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.20.2.tgz", + "integrity": "sha512-HfLOfn9YWmkSKRQqovpnITazdtquEW8/SoHW7pWpuEeguaZI4QnCRW6b+oZTztdBnZOS2hqJ6im/D5cPzBTTlQ==", "cpu": [ "ia32" ], @@ -464,9 +455,9 @@ } }, "node_modules/@esbuild/win32-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.19.12.tgz", - "integrity": "sha512-T1QyPSDCyMXaO3pzBkF96E8xMkiRYbUEZADd29SyPGabqxMViNoii+NcK7eWJAEoU6RZyEm5lVSIjTmcdoB9HA==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.20.2.tgz", + "integrity": "sha512-N49X4lJX27+l9jbLKSqZ6bKNjzQvHaT8IIFUy+YIqmXQdjYCToGWwOItDrfby14c78aDd5NHQl29xingXfCdLQ==", "cpu": [ "x64" ], @@ -558,28 +549,28 @@ } }, "node_modules/@floating-ui/core": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.6.0.tgz", - "integrity": "sha512-PcF++MykgmTj3CIyOQbKA/hDzOAiqI3mhuoN44WRCopIs1sgoDoU4oty4Jtqaj/y3oDU6fnVSm4QG0a3t5i0+g==", + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.6.1.tgz", + "integrity": "sha512-42UH54oPZHPdRHdw6BgoBD6cg/eVTmVrFcgeRDM3jbO7uxSoipVcmcIGFcA5jmOHO5apcyvBhkSKES3fQJnu7A==", "dependencies": { - "@floating-ui/utils": "^0.2.1" + "@floating-ui/utils": "^0.2.0" } }, "node_modules/@floating-ui/dom": { - "version": "1.6.3", - "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.6.3.tgz", - "integrity": "sha512-RnDthu3mzPlQ31Ss/BTwQ1zjzIhr3lk1gZB1OC56h/1vEtaXkESrOqL5fQVMfXpwGtRwX+YsZBdyHtJMQnkArw==", + "version": "1.6.4", + "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.6.4.tgz", + "integrity": "sha512-0G8R+zOvQsAG1pg2Q99P21jiqxqGBW1iRe/iXHsBRBxnpXKFI8QwbB4x5KmYLggNO5m34IQgOIu9SCRfR/WWiQ==", "dependencies": { "@floating-ui/core": "^1.0.0", "@floating-ui/utils": "^0.2.0" } }, "node_modules/@floating-ui/react-dom": { - "version": "2.0.8", - "resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.0.8.tgz", - "integrity": "sha512-HOdqOt3R3OGeTKidaLvJKcgg75S6tibQ3Tif4eyd91QnIJWr0NLvoXFpJA/j8HqkFSL68GDca9AuyWEHlhyClw==", + "version": "2.0.9", + "resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.0.9.tgz", + "integrity": "sha512-q0umO0+LQK4+p6aGyvzASqKbKOJcAHJ7ycE9CuUvfx3s9zTHWmGJTPOIlM/hmSBfUfg/XfY5YhLBLR/LHwShQQ==", "dependencies": { - "@floating-ui/dom": "^1.6.1" + "@floating-ui/dom": "^1.0.0" }, "peerDependencies": { "react": ">=16.8.0", @@ -587,14 +578,14 @@ } }, "node_modules/@floating-ui/utils": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.1.tgz", - "integrity": "sha512-9TANp6GPoMtYzQdt54kfAyMmz1+osLlXdg2ENroU7zzrtflTLrrC/lgrIfaSe+Wu0b89GKccT7vxXA0MoAIO+Q==" + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.2.tgz", + "integrity": 
"sha512-J4yDIIthosAsRZ5CPYP/jQvUAQtlZTTD/4suA08/FEnlxqW3sKS9iAhgsa9VYLZ6vDHn/ixJgIqRQPotoBjxIw==" }, "node_modules/@graphiql/react": { - "version": "0.20.3", - "resolved": "https://registry.npmjs.org/@graphiql/react/-/react-0.20.3.tgz", - "integrity": "sha512-LHEiWQPABflTyRJZBZB50WSlrWER4RtlWg9XV1+D4yZQ3+6GbLM7X1zYf4D/TQ6AJB/vLZQHEnbhS0LuKcNqfA==", + "version": "0.21.0", + "resolved": "https://registry.npmjs.org/@graphiql/react/-/react-0.21.0.tgz", + "integrity": "sha512-UlXzG78HC5+CgQYXw0jVZPoZX0Uh2jPIrqLBIxAdAWMZsmcHMZHAujZtION1pbIrv22cWxP95W+8RpDIHijYow==", "dependencies": { "@graphiql/toolkit": "^0.9.1", "@headlessui/react": "^1.7.15", @@ -605,11 +596,11 @@ "@types/codemirror": "^5.60.8", "clsx": "^1.2.1", "codemirror": "^5.65.3", - "codemirror-graphql": "^2.0.10", + "codemirror-graphql": "^2.0.11", "copy-to-clipboard": "^3.2.0", "framer-motion": "^6.5.1", "graphql-language-service": "^5.2.0", - "markdown-it": "^12.2.0", + "markdown-it": "^14.1.0", "set-value": "^4.1.0" }, "peerDependencies": { @@ -637,9 +628,9 @@ } }, "node_modules/@headlessui/react": { - "version": "1.7.18", - "resolved": "https://registry.npmjs.org/@headlessui/react/-/react-1.7.18.tgz", - "integrity": "sha512-4i5DOrzwN4qSgNsL4Si61VMkUcWbcSKueUV7sFhpHzQcSShdlHENE5+QBntMSRvHt8NyoFO2AGG8si9lq+w4zQ==", + "version": "1.7.19", + "resolved": "https://registry.npmjs.org/@headlessui/react/-/react-1.7.19.tgz", + "integrity": "sha512-Ll+8q3OlMJfJbAKM/+/Y2q6PPYbryqNTXDbryx7SXLIDamkF6iQFbriYHga0dY44PvDhvvBWCx1Xj4U5+G4hOw==", "dependencies": { "@tanstack/react-virtual": "^3.0.0-beta.60", "client-only": "^0.0.1" @@ -702,9 +693,9 @@ } }, "node_modules/@humanwhocodes/object-schema": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.2.tgz", - "integrity": "sha512-6EwiSjwWYP7pTckG6I5eyFANjPhmPjUX9JRLUSfNPC7FX7zK9gyZAfUEaECL6ALTpGX5AjnBq3C9XmVWPitNpw==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz", + "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==", "dev": true }, "node_modules/@lezer/common": { @@ -1455,9 +1446,9 @@ } }, "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.12.1.tgz", - "integrity": "sha512-iU2Sya8hNn1LhsYyf0N+L4Gf9Qc+9eBTJJJsaOGUp+7x4n2M9dxTt8UvhJl3oeftSjblSlpCfvjA/IfP3g5VjQ==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.17.2.tgz", + "integrity": "sha512-NM0jFxY8bB8QLkoKxIQeObCaDlJKewVlIEkuyYKm5An1tdVZ966w2+MPQ2l8LBZLjR+SgyV+nRkTIunzOYBMLQ==", "cpu": [ "arm" ], @@ -1468,9 +1459,9 @@ ] }, "node_modules/@rollup/rollup-android-arm64": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.12.1.tgz", - "integrity": "sha512-wlzcWiH2Ir7rdMELxFE5vuM7D6TsOcJ2Yw0c3vaBR3VOsJFVTx9xvwnAvhgU5Ii8Gd6+I11qNHwndDscIm0HXg==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.17.2.tgz", + "integrity": "sha512-yeX/Usk7daNIVwkq2uGoq2BYJKZY1JfyLTaHO/jaiSwi/lsf8fTFoQW/n6IdAsx5tx+iotu2zCJwz8MxI6D/Bw==", "cpu": [ "arm64" ], @@ -1481,9 +1472,9 @@ ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.12.1", - "resolved": 
"https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.12.1.tgz", - "integrity": "sha512-YRXa1+aZIFN5BaImK+84B3uNK8C6+ynKLPgvn29X9s0LTVCByp54TB7tdSMHDR7GTV39bz1lOmlLDuedgTwwHg==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.17.2.tgz", + "integrity": "sha512-kcMLpE6uCwls023+kknm71ug7MZOrtXo+y5p/tsg6jltpDtgQY1Eq5sGfHcQfb+lfuKwhBmEURDga9N0ol4YPw==", "cpu": [ "arm64" ], @@ -1494,9 +1485,9 @@ ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.12.1.tgz", - "integrity": "sha512-opjWJ4MevxeA8FhlngQWPBOvVWYNPFkq6/25rGgG+KOy0r8clYwL1CFd+PGwRqqMFVQ4/Qd3sQu5t7ucP7C/Uw==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.17.2.tgz", + "integrity": "sha512-AtKwD0VEx0zWkL0ZjixEkp5tbNLzX+FCqGG1SvOu993HnSz4qDI6S4kGzubrEJAljpVkhRSlg5bzpV//E6ysTQ==", "cpu": [ "x64" ], @@ -1507,9 +1498,22 @@ ] }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.12.1.tgz", - "integrity": "sha512-uBkwaI+gBUlIe+EfbNnY5xNyXuhZbDSx2nzzW8tRMjUmpScd6lCQYKY2V9BATHtv5Ef2OBq6SChEP8h+/cxifQ==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.17.2.tgz", + "integrity": "sha512-3reX2fUHqN7sffBNqmEyMQVj/CKhIHZd4y631duy0hZqI8Qoqf6lTtmAKvJFYa6bhU95B1D0WgzHkmTg33In0A==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.17.2.tgz", + "integrity": "sha512-uSqpsp91mheRgw96xtyAGP9FW5ChctTFEoXP0r5FAzj/3ZRv3Uxjtc7taRQSaQM/q85KEKjKsZuiZM3GyUivRg==", "cpu": [ "arm" ], @@ -1520,9 +1524,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.12.1.tgz", - "integrity": "sha512-0bK9aG1kIg0Su7OcFTlexkVeNZ5IzEsnz1ept87a0TUgZ6HplSgkJAnFpEVRW7GRcikT4GlPV0pbtVedOaXHQQ==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.17.2.tgz", + "integrity": "sha512-EMMPHkiCRtE8Wdk3Qhtciq6BndLtstqZIroHiiGzB3C5LDJmIZcSzVtLRbwuXuUft1Cnv+9fxuDtDxz3k3EW2A==", "cpu": [ "arm64" ], @@ -1533,9 +1537,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.12.1.tgz", - "integrity": "sha512-qB6AFRXuP8bdkBI4D7UPUbE7OQf7u5OL+R94JE42Z2Qjmyj74FtDdLGeriRyBDhm4rQSvqAGCGC01b8Fu2LthQ==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.17.2.tgz", + "integrity": "sha512-NMPylUUZ1i0z/xJUIx6VUhISZDRT+uTWpBcjdv0/zkp7b/bQDF+NfnfdzuTiB1G6HTodgoFa93hp0O1xl+/UbA==", "cpu": [ "arm64" ], @@ -1545,10 +1549,23 @@ "linux" ] }, + "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.17.2.tgz", + "integrity": 
"sha512-T19My13y8uYXPw/L/k0JYaX1fJKFT/PWdXiHr8mTbXWxjVF1t+8Xl31DgBBvEKclw+1b00Chg0hxE2O7bTG7GQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.12.1.tgz", - "integrity": "sha512-sHig3LaGlpNgDj5o8uPEoGs98RII8HpNIqFtAI8/pYABO8i0nb1QzT0JDoXF/pxzqO+FkxvwkHZo9k0NJYDedg==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.17.2.tgz", + "integrity": "sha512-BOaNfthf3X3fOWAB+IJ9kxTgPmMqPPH5f5k2DcCsRrBIbWnaJCgX2ll77dV1TdSy9SaXTR5iDXRL8n7AnoP5cg==", "cpu": [ "riscv64" ], @@ -1558,10 +1575,23 @@ "linux" ] }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.17.2.tgz", + "integrity": "sha512-W0UP/x7bnn3xN2eYMql2T/+wpASLE5SjObXILTMPUBDB/Fg/FxC+gX4nvCfPBCbNhz51C+HcqQp2qQ4u25ok6g==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.12.1.tgz", - "integrity": "sha512-nD3YcUv6jBJbBNFvSbp0IV66+ba/1teuBcu+fBBPZ33sidxitc6ErhON3JNavaH8HlswhWMC3s5rgZpM4MtPqQ==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.17.2.tgz", + "integrity": "sha512-Hy7pLwByUOuyaFC6mAr7m+oMC+V7qyifzs/nW2OJfC8H4hbCzOX07Ov0VFk/zP3kBsELWNFi7rJtgbKYsav9QQ==", "cpu": [ "x64" ], @@ -1572,9 +1602,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.12.1.tgz", - "integrity": "sha512-7/XVZqgBby2qp/cO0TQ8uJK+9xnSdJ9ct6gSDdEr4MfABrjTyrW6Bau7HQ73a2a5tPB7hno49A0y1jhWGDN9OQ==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.17.2.tgz", + "integrity": "sha512-h1+yTWeYbRdAyJ/jMiVw0l6fOOm/0D1vNLui9iPuqgRGnXA0u21gAqOyB5iHjlM9MMfNOm9RHCQ7zLIzT0x11Q==", "cpu": [ "x64" ], @@ -1585,9 +1615,9 @@ ] }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.12.1.tgz", - "integrity": "sha512-CYc64bnICG42UPL7TrhIwsJW4QcKkIt9gGlj21gq3VV0LL6XNb1yAdHVp1pIi9gkts9gGcT3OfUYHjGP7ETAiw==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.17.2.tgz", + "integrity": "sha512-tmdtXMfKAjy5+IQsVtDiCfqbynAQE/TQRpWdVataHmhMb9DCoJxp9vLcCBjEQWMiUYxO1QprH/HbY9ragCEFLA==", "cpu": [ "arm64" ], @@ -1598,9 +1628,9 @@ ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.12.1.tgz", - "integrity": "sha512-LN+vnlZ9g0qlHGlS920GR4zFCqAwbv2lULrR29yGaWP9u7wF5L7GqWu9Ah6/kFZPXPUkpdZwd//TNR+9XC9hvA==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.17.2.tgz", + "integrity": "sha512-7II/QCSTAHuE5vdZaQEwJq2ZACkBpQDOmQsE6D6XUbnBHW8IAhm4eTufL6msLJorzrHDFv3CF8oCA/hSIRuZeQ==", "cpu": [ "ia32" ], @@ -1611,9 +1641,9 @@ ] }, 
"node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.12.1.tgz", - "integrity": "sha512-n+vkrSyphvmU0qkQ6QBNXCGr2mKjhP08mPRM/Xp5Ck2FV4NrHU+y6axzDeixUrCBHVUS51TZhjqrKBBsHLKb2Q==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.17.2.tgz", + "integrity": "sha512-TGGO7v7qOq4CYmSBVEYpI1Y5xDuCEnbVC5Vth8mOsW0gDSzxNrVERPc790IGHsrT2dQSimgMr9Ub3Y1Jci5/8w==", "cpu": [ "x64" ], @@ -1624,12 +1654,12 @@ ] }, "node_modules/@swagger-api/apidom-ast": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ast/-/apidom-ast-0.97.0.tgz", - "integrity": "sha512-KpPyC8x5ZrB4l9+jgl8FAhokedh+8b5VuBTTdTJKFf+x5uznMiBf/MZTWgvsIk8/9MtjkQYUN1qgVzEPiKWvHg==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ast/-/apidom-ast-0.99.1.tgz", + "integrity": "sha512-evkKm2JaqNfg3dB2Yk3FWL/Qy2r4csZLMZ9bHMG+xNpti8ulENHMjuCh3Ry4koV1gD7IA54CU2ZjcaTvqJa22Q==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-error": "^0.97.0", + "@swagger-api/apidom-error": "^0.99.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1637,13 +1667,13 @@ } }, "node_modules/@swagger-api/apidom-core": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-core/-/apidom-core-0.97.0.tgz", - "integrity": "sha512-3LYlN0Cox0FBFNZqmgi7VyJ4MXppCmZoFjlurT+Y90ND1y2lCidcwjAthr3QpV8b+UCc7MG3APBGRfwqaYZ2IA==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-core/-/apidom-core-0.99.1.tgz", + "integrity": "sha512-oWU9Re2B7hPFAnm4ymN2HNOqevMqZsvL4Fjud2qN+KFWNvZ1/r8kwQaj0Pba5Kwka2bcWo0aEfWNayP4axTB+Q==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-ast": "^0.97.0", - "@swagger-api/apidom-error": "^0.97.0", + "@swagger-api/apidom-ast": "^0.99.1", + "@swagger-api/apidom-error": "^0.99.0", "@types/ramda": "~0.29.6", "minim": "~0.23.8", "ramda": "~0.29.1", @@ -1653,36 +1683,36 @@ } }, "node_modules/@swagger-api/apidom-error": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-error/-/apidom-error-0.97.0.tgz", - "integrity": "sha512-Y2YRnsJSXp+MdgwwMSCtidzJfy/bL6CZEpc+5aWUw1mphTjfLZC66uA4btUgUevyiT6mNHXm8tUmGomHA7Izdw==", + "version": "0.99.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-error/-/apidom-error-0.99.0.tgz", + "integrity": "sha512-ZdFdn+GeIo23X2GKFrfH4Y5KY8yTzVF1l/Mqjs8+nD30LTbYg6f3ITHn429dk8fDT3NT69fG+gGm60FAFaKkeQ==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7" } }, "node_modules/@swagger-api/apidom-json-pointer": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-json-pointer/-/apidom-json-pointer-0.97.0.tgz", - "integrity": "sha512-9vcgePgcYXUiYEqnvx8Ew04j8JtfenosysbSuGgRs93Ls8mQ/+ndIOklHaXJzNjBZZxqxS0p6QLFcj1jpUiojQ==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-json-pointer/-/apidom-json-pointer-0.99.1.tgz", + "integrity": "sha512-4fOOKTLoBWpfX2eGNx93sqBsS1KRCtBFOq75n1jMcRbs1rrj+JxcaiTFUE+6BZqIqBsCqTmRMYE/HsgwBS3vhQ==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-error": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-error": "^0.99.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": 
"^4.0.0" } }, "node_modules/@swagger-api/apidom-ns-api-design-systems": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-api-design-systems/-/apidom-ns-api-design-systems-0.97.0.tgz", - "integrity": "sha512-uSTIEX4q9XWoP9TQq9nEtW5xG3hVQN2VD5spYoxvYlzUOtg12yxkVgu776eq0kVZd74acZhKIF7mn3uiqaQcHA==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-api-design-systems/-/apidom-ns-api-design-systems-0.99.1.tgz", + "integrity": "sha512-LID3n+Y2eKBzaR7oYShto48+EFPBLZLuKIJdEZ53is6SqD5jHS0Ev6xLj2QfqSIQR3OoVN3PUOrz724Jkpiv/A==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-error": "^0.97.0", - "@swagger-api/apidom-ns-openapi-3-1": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-error": "^0.99.0", + "@swagger-api/apidom-ns-openapi-3-1": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1690,14 +1720,14 @@ } }, "node_modules/@swagger-api/apidom-ns-asyncapi-2": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-asyncapi-2/-/apidom-ns-asyncapi-2-0.97.0.tgz", - "integrity": "sha512-buEQSrXdtjoAkqIWSZ448HlvnareupthIoObYELp25LVuQwhxxVSY3NR0aCIR37GHgSchrmPBVcsvPMtXV96BA==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-asyncapi-2/-/apidom-ns-asyncapi-2-0.99.1.tgz", + "integrity": "sha512-fAUsKbg0MuvEPjE2UWQu+62K0eh/3yTE2M5u/QCqpj48IpByMNYLKU9ICfMMAzBjXNQAVuEr07/UgY9CRHUVhA==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-json-schema-draft-7": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-json-schema-draft-7": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1705,13 +1735,13 @@ } }, "node_modules/@swagger-api/apidom-ns-json-schema-draft-4": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-4/-/apidom-ns-json-schema-draft-4-0.97.0.tgz", - "integrity": "sha512-eBMIPxX4huNDGle6TOfSe1kKS1/HvL6w66GWWLFxZW2doCQHMADgjo7j/kVowrXiJtEoMgjBVp3W30WkcwBVug==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-4/-/apidom-ns-json-schema-draft-4-0.99.1.tgz", + "integrity": "sha512-HdxD4WXnaMJsdodrWoynzgteg9UDaZsVkX04oObQPR3C1ZWW9KahEGBSbtr/oBhnE/QgiPfNHUDWrQvk3oC6lg==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-ast": "^0.97.0", - "@swagger-api/apidom-core": "^0.97.0", + "@swagger-api/apidom-ast": "^0.99.1", + "@swagger-api/apidom-core": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1719,15 +1749,15 @@ } }, "node_modules/@swagger-api/apidom-ns-json-schema-draft-6": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-6/-/apidom-ns-json-schema-draft-6-0.97.0.tgz", - "integrity": "sha512-tRbg3/b4aJGfcODc0HDngZDjBdhPAv8OZM1OZdsqI4EEIw3PI/wpd+b6b8a5udOjAdbUYqnYsq6gCylCDNBnzw==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-6/-/apidom-ns-json-schema-draft-6-0.99.1.tgz", + "integrity": "sha512-O6A25j9y+Hjvwwq8x+uTaIhK4tp0CqO6YrFRXmfmOnkBtJ6Q66jqbvRzIN9XQfW8VaIipqAlOin++ufsfuDd1g==", "optional": true, "dependencies": { "@babel/runtime-corejs3": 
"^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-error": "^0.97.0", - "@swagger-api/apidom-ns-json-schema-draft-4": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-error": "^0.99.0", + "@swagger-api/apidom-ns-json-schema-draft-4": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1735,15 +1765,15 @@ } }, "node_modules/@swagger-api/apidom-ns-json-schema-draft-7": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-7/-/apidom-ns-json-schema-draft-7-0.97.0.tgz", - "integrity": "sha512-0GITsoa6kVVkoKBUxyeODmh6vjGXuvDQZd3Vxs1nz0c/O6ZR+VBfBB3JW5wzhVr+WCXebaOJGDyWkxJMHKycxw==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-7/-/apidom-ns-json-schema-draft-7-0.99.1.tgz", + "integrity": "sha512-I4IpTkAlParfUWOi5kJU7jQqeMKy39JOWiRz8jTyPoZ8vvixVgyIlOS7/bj5uLxbBw3QxOFXPuIqUvK1uFElAg==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-error": "^0.97.0", - "@swagger-api/apidom-ns-json-schema-draft-6": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-error": "^0.99.0", + "@swagger-api/apidom-ns-json-schema-draft-6": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1751,15 +1781,15 @@ } }, "node_modules/@swagger-api/apidom-ns-openapi-2": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-2/-/apidom-ns-openapi-2-0.97.0.tgz", - "integrity": "sha512-5gOA9FiO1J9OxJhcVBeXdm77kuh2cwPXG6Sh/DOlbk733Pz9v9W0aQgpLi5Ltsgagxe1sHhBqxJ1asw10QFzzw==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-2/-/apidom-ns-openapi-2-0.99.1.tgz", + "integrity": "sha512-ChEd1RaJKrYskLTmlH8NL9tNpAgroSPklTwJCvHmZjzaWvW7N/B2diHBOaz+rnVLiW9Hb7QOlR/biEXJn7OUIg==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-error": "^0.97.0", - "@swagger-api/apidom-ns-json-schema-draft-4": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-error": "^0.99.0", + "@swagger-api/apidom-ns-json-schema-draft-4": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1767,14 +1797,14 @@ } }, "node_modules/@swagger-api/apidom-ns-openapi-3-0": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-3-0/-/apidom-ns-openapi-3-0-0.97.0.tgz", - "integrity": "sha512-fbnN87SF0WN/4DcSpceuo+NUtkAGeicMIucEMF+LIIiCAF27Xi5d6Q823i9DgOEfJtifHKVj6Zhl/zSKAD2eyw==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-3-0/-/apidom-ns-openapi-3-0-0.99.1.tgz", + "integrity": "sha512-9lfa2a+4rLp+1loEXrr+Dq3whdBwBWHukctsX/C/cGr4SG0NO8+tmS3FLsOD+ly6O/YPdszPDxVcIqqNV8J2uA==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-error": "^0.97.0", - "@swagger-api/apidom-ns-json-schema-draft-4": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-error": "^0.99.0", + "@swagger-api/apidom-ns-json-schema-draft-4": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1782,14 +1812,14 @@ } }, "node_modules/@swagger-api/apidom-ns-openapi-3-1": { - "version": "0.97.0", - "resolved": 
"https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-3-1/-/apidom-ns-openapi-3-1-0.97.0.tgz", - "integrity": "sha512-DyvkTim+t7iVKyze6N3tITsfyElthmOwOcxwOjKj/3lySEy61DuY4X2FaPD5+owftVDxMs4Q6F9Chm7qv91a+Q==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-3-1/-/apidom-ns-openapi-3-1-0.99.1.tgz", + "integrity": "sha512-XsRxM9WC+WywBo+rr/YUayQRsV2mN8AzBxVlKzJoZ+pBgmPYe24n3Ma/0FTr8zGwQyg4DtOBwydlYz8QFrLPFA==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-ast": "^0.97.0", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-openapi-3-0": "^0.97.0", + "@swagger-api/apidom-ast": "^0.99.1", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-openapi-3-0": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1797,14 +1827,14 @@ } }, "node_modules/@swagger-api/apidom-ns-workflows-1": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-workflows-1/-/apidom-ns-workflows-1-0.97.0.tgz", - "integrity": "sha512-eIuoTRSITlUtMjpM3J0H9b2rVeEVu13i/Fv6+ZMPob0yHmQBWo9bnLjxxnfEZkpvp050worKULfNMdJV8NKBkA==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-workflows-1/-/apidom-ns-workflows-1-0.99.1.tgz", + "integrity": "sha512-s6SmFzlBmKKRdlyLdZsjXHYJ+7+AuDyK3qrBAPHX7mDe/uN6D7QPGD05oCzHytPhbeZQPMf0wi9vPUrM1s1xvw==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-openapi-3-1": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-openapi-3-1": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1812,75 +1842,75 @@ } }, "node_modules/@swagger-api/apidom-parser-adapter-api-design-systems-json": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-api-design-systems-json/-/apidom-parser-adapter-api-design-systems-json-0.97.0.tgz", - "integrity": "sha512-ZDzaiTHMEpz0kM0/iyHEjySTf0xoLKDJwJiSxKNuew141k0rakTVeVisxXeq+6JQi2eC6KuyS98DHMe7hEIVUw==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-api-design-systems-json/-/apidom-parser-adapter-api-design-systems-json-0.99.1.tgz", + "integrity": "sha512-ONeGsOZPZ16SvYbfHKiLjg8IeKGg+nJC+fOIqnelGdMCu/34ed0X7k6XQZGrwbDtmSd3SkXykL3F55H5BFiUPQ==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-api-design-systems": "^0.97.0", - "@swagger-api/apidom-parser-adapter-json": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-api-design-systems": "^0.99.1", + "@swagger-api/apidom-parser-adapter-json": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-api-design-systems-yaml": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-api-design-systems-yaml/-/apidom-parser-adapter-api-design-systems-yaml-0.97.0.tgz", - "integrity": "sha512-5/BziPWqrHLr91VR+EC4pXt/fNToWMmvG+d7RVjksHinrjps2E6HA+oZOhqKqA2LRCLNjGhNUptXzRMDjjtenw==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-api-design-systems-yaml/-/apidom-parser-adapter-api-design-systems-yaml-0.99.1.tgz", + "integrity": 
"sha512-mVOHebofGhI3E8HW/7YsqGOpIWOBSMc5R5aQFMYMYpTxrpDHNhyEfFEWqZRAoC2Hin9NZ2BeI/hsrXGIw/LoeQ==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-api-design-systems": "^0.97.0", - "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-api-design-systems": "^0.99.1", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-asyncapi-json-2": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-asyncapi-json-2/-/apidom-parser-adapter-asyncapi-json-2-0.97.0.tgz", - "integrity": "sha512-XLD/YZifnhezRQY5ADQQAje5G5qtZ4GAbXk//1sRNe3R/qCk1pDxmRYr27yzt8w1XhfM+9VQmCTI21ZFpNFQOA==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-asyncapi-json-2/-/apidom-parser-adapter-asyncapi-json-2-0.99.1.tgz", + "integrity": "sha512-2kKVf5ecTuDirPpk8nDRyTrT0tkrWjdaUPwJ/+l2RdgWYObNVwdX2lAS9URC4zK/drdQOQxjetF+aDQBBhXmXA==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-asyncapi-2": "^0.97.0", - "@swagger-api/apidom-parser-adapter-json": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-asyncapi-2": "^0.99.1", + "@swagger-api/apidom-parser-adapter-json": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-asyncapi-yaml-2": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-asyncapi-yaml-2/-/apidom-parser-adapter-asyncapi-yaml-2-0.97.0.tgz", - "integrity": "sha512-whyThDiGN4FoNirgY0XtXF7IJeU6NfsrBwjaxCkYBuSPslZBoWy4ojEQbfg+2HqNLbnHKJyvabh9/tSIxgB92A==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-asyncapi-yaml-2/-/apidom-parser-adapter-asyncapi-yaml-2-0.99.1.tgz", + "integrity": "sha512-UX+rLOUSQuWe5yNXS8eLFvDhCA1CP5r80jLtvT3n0FDnss4+9WkPlqgj4UPH4XoitXSvBVOZxbdjNwfKtJzsHA==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-asyncapi-2": "^0.97.0", - "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-asyncapi-2": "^0.99.1", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-json": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-json/-/apidom-parser-adapter-json-0.97.0.tgz", - "integrity": "sha512-MPhAX77Z9Csti+Kljtbrl/ez2H610R4fQg0RnkNW40f4e6TXeOogT5tmceeWP+IKGAKX45HA1JpVPxdtSJn3ww==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-json/-/apidom-parser-adapter-json-0.99.1.tgz", + "integrity": "sha512-qVeSdhaDIggIkFtMI4aqqv4MYuJlRQ6pniP+Li+DjcHeTKYHelX0OwoznaTlLlZ1tM9QFaMi8rw8xfGp6vMHgg==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-ast": "^0.97.0", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-error": "^0.97.0", + "@swagger-api/apidom-ast": "^0.99.1", + 
"@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-error": "^0.99.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1890,135 +1920,135 @@ } }, "node_modules/@swagger-api/apidom-parser-adapter-openapi-json-2": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-2/-/apidom-parser-adapter-openapi-json-2-0.97.0.tgz", - "integrity": "sha512-HtaoRN7wnVB2ilxs/RpLBR7+MwIfUqUcdCzC/EVV788CnSbutwj61W3jR2w9BRXeANJ4K2APcvU4W7WiI9Sugg==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-2/-/apidom-parser-adapter-openapi-json-2-0.99.1.tgz", + "integrity": "sha512-aHzdast9HMeGTaTUWwVovMcspEVCAdvBJe47BzMZfzcVOnZlAVyTmLqxQ/3s9fjseRrPhFYqKtCOKROzbWeAhg==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-openapi-2": "^0.97.0", - "@swagger-api/apidom-parser-adapter-json": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-openapi-2": "^0.99.1", + "@swagger-api/apidom-parser-adapter-json": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-openapi-json-3-0": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-3-0/-/apidom-parser-adapter-openapi-json-3-0-0.97.0.tgz", - "integrity": "sha512-psfxh7k671HukibaY53cems0fcsLQP8U5lQPzVDevEGJQoguAWHyV2C5kOr52XOJInmsN5E+COEn6oPzsIaDCg==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-3-0/-/apidom-parser-adapter-openapi-json-3-0-0.99.1.tgz", + "integrity": "sha512-l/nYccP87GL611W9OCiYWUOizhhoGenuKa7Ocmaf9Rg+xIDnPw29+9p/SuGEN2jjtql0iYuNI4+ZzwiC2+teSg==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-openapi-3-0": "^0.97.0", - "@swagger-api/apidom-parser-adapter-json": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-openapi-3-0": "^0.99.1", + "@swagger-api/apidom-parser-adapter-json": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-openapi-json-3-1": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-3-1/-/apidom-parser-adapter-openapi-json-3-1-0.97.0.tgz", - "integrity": "sha512-PJpcLhS441ATFjbCHHhVUPd8K1JZaiFQJS7yfQEKQmA5MlBRh3w7mqCJAbZN49wuMkelTdB8qJJlVEGUDSxX5Q==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-3-1/-/apidom-parser-adapter-openapi-json-3-1-0.99.1.tgz", + "integrity": "sha512-Eie4ztKR5hgrGESBDHB9xIODTB/gvjWBwPNveZ/iSlJ/yhZGyDMC8dgv0aQiyFP01mKaaBMhyZjWgsvts9l+cQ==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-openapi-3-1": "^0.97.0", - "@swagger-api/apidom-parser-adapter-json": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-openapi-3-1": "^0.99.1", + "@swagger-api/apidom-parser-adapter-json": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-openapi-yaml-2": { - "version": "0.97.0", - "resolved": 
"https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-2/-/apidom-parser-adapter-openapi-yaml-2-0.97.0.tgz", - "integrity": "sha512-X5saN/AElpS+LohbSjNPesUPWYOM8Wb19+OD7/WS1r6AVRIlj5gKLy3vO7BLBvaER5G73qYylfrPxCoUPlpZZg==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-2/-/apidom-parser-adapter-openapi-yaml-2-0.99.1.tgz", + "integrity": "sha512-MzjUyhGmJ+jQly90Nak7s01x2Jp1GvBe+Z8BXwkArNOFjLvzQIjdAx7F943/VlLaV9y71DNXVsqhgKdiqjnX3w==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-openapi-2": "^0.97.0", - "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-openapi-2": "^0.99.1", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-openapi-yaml-3-0": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-3-0/-/apidom-parser-adapter-openapi-yaml-3-0-0.97.0.tgz", - "integrity": "sha512-kBW6atIN0rONf9kjNeE5eHkxb3amfby0vxKfk+9fiRdQbJVCg4UiWOFmU5rD9bc2smtLWSQNkjlMkKS3i2/4Wg==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-3-0/-/apidom-parser-adapter-openapi-yaml-3-0-0.99.1.tgz", + "integrity": "sha512-TF/yquy1Alce/olQzR5AnjnOx7o7q8MkXMi0JxrtqvMk9Ky//0qFxFGzFQEzA++NaSGt9StG0Pcgp4MGZAzJYg==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-openapi-3-0": "^0.97.0", - "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-openapi-3-0": "^0.99.1", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-openapi-yaml-3-1": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-3-1/-/apidom-parser-adapter-openapi-yaml-3-1-0.97.0.tgz", - "integrity": "sha512-cclRwQ9IQj6sFLUCDzqRbbbplQfKdt9xz8YONvtq4XBHZO6Ab8z5CF3A9eLiuW1TJZ3y0QU7xmI6h5jWwUrC9w==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-3-1/-/apidom-parser-adapter-openapi-yaml-3-1-0.99.1.tgz", + "integrity": "sha512-baXbKqjnbmgEmFgCVHlDEiFANHs5lHnnBM0X3k5kNtAVule6Lc5lAZVoySpTGyBJ+4nq4RHNJfbKW8RDHgVMoQ==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-openapi-3-1": "^0.97.0", - "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-openapi-3-1": "^0.99.1", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-workflows-json-1": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-workflows-json-1/-/apidom-parser-adapter-workflows-json-1-0.97.0.tgz", - "integrity": "sha512-UvnISzq5JDG43sTIJ2oE8u8qALHmBKbYMGncYgUdlHx7z5RgPAWxIRDWH40YFzUSuKSRNp4TI7eG/9MUd3RnGA==", + "version": "0.99.1", + 
"resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-workflows-json-1/-/apidom-parser-adapter-workflows-json-1-0.99.1.tgz", + "integrity": "sha512-Uu8SaQfl2XiiXDQVRUvUCu3yk7jwHVmwKOoacbJGzPducrR/7/bOe8dNeN4CMRw7HKeRbh02UxXtR46mgBPnog==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-workflows-1": "^0.97.0", - "@swagger-api/apidom-parser-adapter-json": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-workflows-1": "^0.99.1", + "@swagger-api/apidom-parser-adapter-json": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-workflows-yaml-1": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-workflows-yaml-1/-/apidom-parser-adapter-workflows-yaml-1-0.97.0.tgz", - "integrity": "sha512-TTZS0YkFvy0X8Huom+fr3muZsCy8mtDpuUks45EvPqv6gjGLCBw3/AZ507CS0YxYvoERbXkYfAYqxW8lptwKuQ==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-workflows-yaml-1/-/apidom-parser-adapter-workflows-yaml-1-0.99.1.tgz", + "integrity": "sha512-9DX9X9wxW6TJF5lG0k/w0GxeMPkHACwEQx/QFJqg1YRD3/UWSkBcm567KbfCh5BiDx5p5WAYhTGInQEAF3d0zQ==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-workflows-1": "^0.97.0", - "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-workflows-1": "^0.99.1", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-yaml-1-2": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-yaml-1-2/-/apidom-parser-adapter-yaml-1-2-0.97.0.tgz", - "integrity": "sha512-3f1ADjQyKyLnuRhPuoHMgWMW28o0ylohWCQwX4q69CMH0kqGxP7HnqIU/i0I2cxZdjGv72OCdiKwaR/OgHcmEw==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-yaml-1-2/-/apidom-parser-adapter-yaml-1-2-0.99.1.tgz", + "integrity": "sha512-MmTDUkrvFIg2AwzaZmiqBifWpoECh7AKeJcAD8Tm+G2/FUmGr3mIr7elc4ehYt/fecSSJEwFGNFU/radKqT/6g==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-ast": "^0.97.0", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-error": "^0.97.0", + "@swagger-api/apidom-ast": "^0.99.1", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-error": "^0.99.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -2028,12 +2058,12 @@ } }, "node_modules/@swagger-api/apidom-reference": { - "version": "0.97.1", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-reference/-/apidom-reference-0.97.1.tgz", - "integrity": "sha512-Bs1U2VutmVpqbCxbCt4DTiL8v0s6osAJx+4v49BGrTcfFFh97K/EOAm48WgA8ViP7qHUNBhUF83rjbpEwOshFw==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-reference/-/apidom-reference-0.99.1.tgz", + "integrity": "sha512-g7xp+ZL/iRX6CEwdUnqqsLfZmaSRlXwEZV8LF1k4k13/o7Qcf7bsPv0fOVGa8ZC29zM8k//FVavwWoXvT2xrFQ==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", "@types/ramda": "~0.29.6", "axios": "^1.4.0", "minimatch": 
"^7.4.3", @@ -2043,27 +2073,27 @@ "stampit": "^4.3.2" }, "optionalDependencies": { - "@swagger-api/apidom-error": "^0.97.0", - "@swagger-api/apidom-json-pointer": "^0.97.0", - "@swagger-api/apidom-ns-asyncapi-2": "^0.97.0", - "@swagger-api/apidom-ns-openapi-2": "^0.97.0", - "@swagger-api/apidom-ns-openapi-3-0": "^0.97.0", - "@swagger-api/apidom-ns-openapi-3-1": "^0.97.0", - "@swagger-api/apidom-ns-workflows-1": "^0.97.0", - "@swagger-api/apidom-parser-adapter-api-design-systems-json": "^0.97.0", - "@swagger-api/apidom-parser-adapter-api-design-systems-yaml": "^0.97.0", - "@swagger-api/apidom-parser-adapter-asyncapi-json-2": "^0.97.0", - "@swagger-api/apidom-parser-adapter-asyncapi-yaml-2": "^0.97.0", - "@swagger-api/apidom-parser-adapter-json": "^0.97.0", - "@swagger-api/apidom-parser-adapter-openapi-json-2": "^0.97.0", - "@swagger-api/apidom-parser-adapter-openapi-json-3-0": "^0.97.0", - "@swagger-api/apidom-parser-adapter-openapi-json-3-1": "^0.97.0", - "@swagger-api/apidom-parser-adapter-openapi-yaml-2": "^0.97.0", - "@swagger-api/apidom-parser-adapter-openapi-yaml-3-0": "^0.97.0", - "@swagger-api/apidom-parser-adapter-openapi-yaml-3-1": "^0.97.0", - "@swagger-api/apidom-parser-adapter-workflows-json-1": "^0.97.0", - "@swagger-api/apidom-parser-adapter-workflows-yaml-1": "^0.97.0", - "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.97.0" + "@swagger-api/apidom-error": "^0.99.0", + "@swagger-api/apidom-json-pointer": "^0.99.1", + "@swagger-api/apidom-ns-asyncapi-2": "^0.99.1", + "@swagger-api/apidom-ns-openapi-2": "^0.99.1", + "@swagger-api/apidom-ns-openapi-3-0": "^0.99.1", + "@swagger-api/apidom-ns-openapi-3-1": "^0.99.1", + "@swagger-api/apidom-ns-workflows-1": "^0.99.1", + "@swagger-api/apidom-parser-adapter-api-design-systems-json": "^0.99.1", + "@swagger-api/apidom-parser-adapter-api-design-systems-yaml": "^0.99.1", + "@swagger-api/apidom-parser-adapter-asyncapi-json-2": "^0.99.1", + "@swagger-api/apidom-parser-adapter-asyncapi-yaml-2": "^0.99.1", + "@swagger-api/apidom-parser-adapter-json": "^0.99.1", + "@swagger-api/apidom-parser-adapter-openapi-json-2": "^0.99.1", + "@swagger-api/apidom-parser-adapter-openapi-json-3-0": "^0.99.1", + "@swagger-api/apidom-parser-adapter-openapi-json-3-1": "^0.99.1", + "@swagger-api/apidom-parser-adapter-openapi-yaml-2": "^0.99.1", + "@swagger-api/apidom-parser-adapter-openapi-yaml-3-0": "^0.99.1", + "@swagger-api/apidom-parser-adapter-openapi-yaml-3-1": "^0.99.1", + "@swagger-api/apidom-parser-adapter-workflows-json-1": "^0.99.1", + "@swagger-api/apidom-parser-adapter-workflows-yaml-1": "^0.99.1", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.99.1" } }, "node_modules/@swagger-api/apidom-reference/node_modules/minimatch": { @@ -2081,9 +2111,9 @@ } }, "node_modules/@swc/core": { - "version": "1.4.6", - "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.4.6.tgz", - "integrity": "sha512-A7iK9+1qzTCIuc3IYcS8gPHCm9bZVKUJrfNnwveZYyo6OFp3jLno4WOM2yBy5uqedgYATEiWgBYHKq37KrU6IA==", + "version": "1.4.17", + "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.4.17.tgz", + "integrity": "sha512-tq+mdWvodMBNBBZbwFIMTVGYHe9N7zvEaycVVjfvAx20k1XozHbHhRv+9pEVFJjwRxLdXmtvFZd3QZHRAOpoNQ==", "dev": true, "hasInstallScript": true, "dependencies": { @@ -2098,16 +2128,16 @@ "url": "https://opencollective.com/swc" }, "optionalDependencies": { - "@swc/core-darwin-arm64": "1.4.6", - "@swc/core-darwin-x64": "1.4.6", - "@swc/core-linux-arm-gnueabihf": "1.4.6", - "@swc/core-linux-arm64-gnu": "1.4.6", - "@swc/core-linux-arm64-musl": "1.4.6", - 
"@swc/core-linux-x64-gnu": "1.4.6", - "@swc/core-linux-x64-musl": "1.4.6", - "@swc/core-win32-arm64-msvc": "1.4.6", - "@swc/core-win32-ia32-msvc": "1.4.6", - "@swc/core-win32-x64-msvc": "1.4.6" + "@swc/core-darwin-arm64": "1.4.17", + "@swc/core-darwin-x64": "1.4.17", + "@swc/core-linux-arm-gnueabihf": "1.4.17", + "@swc/core-linux-arm64-gnu": "1.4.17", + "@swc/core-linux-arm64-musl": "1.4.17", + "@swc/core-linux-x64-gnu": "1.4.17", + "@swc/core-linux-x64-musl": "1.4.17", + "@swc/core-win32-arm64-msvc": "1.4.17", + "@swc/core-win32-ia32-msvc": "1.4.17", + "@swc/core-win32-x64-msvc": "1.4.17" }, "peerDependencies": { "@swc/helpers": "^0.5.0" @@ -2119,9 +2149,9 @@ } }, "node_modules/@swc/core-darwin-arm64": { - "version": "1.4.6", - "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.4.6.tgz", - "integrity": "sha512-bpggpx/BfLFyy48aUKq1PsNUxb7J6CINlpAUk0V4yXfmGnpZH80Gp1pM3GkFDQyCfq7L7IpjPrIjWQwCrL4hYw==", + "version": "1.4.17", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.4.17.tgz", + "integrity": "sha512-HVl+W4LezoqHBAYg2JCqR+s9ife9yPfgWSj37iIawLWzOmuuJ7jVdIB7Ee2B75bEisSEKyxRlTl6Y1Oq3owBgw==", "cpu": [ "arm64" ], @@ -2135,9 +2165,9 @@ } }, "node_modules/@swc/core-darwin-x64": { - "version": "1.4.6", - "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.4.6.tgz", - "integrity": "sha512-vJn+/ZuBTg+vtNkcmgZdH6FQpa0hFVdnB9bAeqYwKkyqP15zaPe6jfC+qL2y/cIeC7ASvHXEKrnCZgBLxfVQ9w==", + "version": "1.4.17", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.4.17.tgz", + "integrity": "sha512-WYRO9Fdzq4S/he8zjW5I95G1zcvyd9yyD3Tgi4/ic84P5XDlSMpBDpBLbr/dCPjmSg7aUXxNQqKqGkl6dQxYlA==", "cpu": [ "x64" ], @@ -2151,9 +2181,9 @@ } }, "node_modules/@swc/core-linux-arm-gnueabihf": { - "version": "1.4.6", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.4.6.tgz", - "integrity": "sha512-hEmYcB/9XBAl02MtuVHszhNjQpjBzhk/NFulnU33tBMbNZpy2TN5yTsitezMq090QXdDz8sKIALApDyg07ZR8g==", + "version": "1.4.17", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.4.17.tgz", + "integrity": "sha512-cgbvpWOvtMH0XFjvwppUCR+Y+nf6QPaGu6AQ5hqCP+5Lv2zO5PG0RfasC4zBIjF53xgwEaaWmGP5/361P30X8Q==", "cpu": [ "arm" ], @@ -2167,9 +2197,9 @@ } }, "node_modules/@swc/core-linux-arm64-gnu": { - "version": "1.4.6", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.4.6.tgz", - "integrity": "sha512-/UCYIVoGpm2YVvGHZM2QOA3dexa28BjcpLAIYnoCbgH5f7ulDhE8FAIO/9pasj+kixDBsdqewHfsNXFYlgGJjQ==", + "version": "1.4.17", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.4.17.tgz", + "integrity": "sha512-l7zHgaIY24cF9dyQ/FOWbmZDsEj2a9gRFbmgx2u19e3FzOPuOnaopFj0fRYXXKCmtdx+anD750iBIYnTR+pq/Q==", "cpu": [ "arm64" ], @@ -2183,9 +2213,9 @@ } }, "node_modules/@swc/core-linux-arm64-musl": { - "version": "1.4.6", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.4.6.tgz", - "integrity": "sha512-LGQsKJ8MA9zZ8xHCkbGkcPSmpkZL2O7drvwsGKynyCttHhpwVjj9lguhD4DWU3+FWIsjvho5Vu0Ggei8OYi/Lw==", + "version": "1.4.17", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.4.17.tgz", + "integrity": "sha512-qhH4gr9gAlVk8MBtzXbzTP3BJyqbAfUOATGkyUtohh85fPXQYuzVlbExix3FZXTwFHNidGHY8C+ocscI7uDaYw==", "cpu": [ "arm64" ], @@ -2199,9 +2229,9 @@ } }, "node_modules/@swc/core-linux-x64-gnu": { - 
"version": "1.4.6", - "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.4.6.tgz", - "integrity": "sha512-10JL2nLIreMQDKvq2TECnQe5fCuoqBHu1yW8aChqgHUyg9d7gfZX/kppUsuimqcgRBnS0AjTDAA+JF6UsG/2Yg==", + "version": "1.4.17", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.4.17.tgz", + "integrity": "sha512-vRDFATL1oN5oZMImkwbgSHEkp8xG1ofEASBypze01W1Tqto8t+yo6gsp69wzCZBlxldsvPpvFZW55Jq0Rn+UnA==", "cpu": [ "x64" ], @@ -2215,9 +2245,9 @@ } }, "node_modules/@swc/core-linux-x64-musl": { - "version": "1.4.6", - "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.4.6.tgz", - "integrity": "sha512-EGyjFVzVY6Do89x8sfah7I3cuP4MwtwzmA6OlfD/KASqfCFf5eIaEBMbajgR41bVfMV7lK72lwAIea5xEyq1AQ==", + "version": "1.4.17", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.4.17.tgz", + "integrity": "sha512-zQNPXAXn3nmPqv54JVEN8k2JMEcMTQ6veVuU0p5O+A7KscJq+AGle/7ZQXzpXSfUCXlLMX4wvd+rwfGhh3J4cw==", "cpu": [ "x64" ], @@ -2231,9 +2261,9 @@ } }, "node_modules/@swc/core-win32-arm64-msvc": { - "version": "1.4.6", - "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.4.6.tgz", - "integrity": "sha512-gfW9AuXvwSyK07Vb8Y8E9m2oJZk21WqcD+X4BZhkbKB0TCZK0zk1j/HpS2UFlr1JB2zPKPpSWLU3ll0GEHRG2A==", + "version": "1.4.17", + "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.4.17.tgz", + "integrity": "sha512-z86n7EhOwyzxwm+DLE5NoLkxCTme2lq7QZlDjbQyfCxOt6isWz8rkW5QowTX8w9Rdmk34ncrjSLvnHOeLY17+w==", "cpu": [ "arm64" ], @@ -2247,9 +2277,9 @@ } }, "node_modules/@swc/core-win32-ia32-msvc": { - "version": "1.4.6", - "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.4.6.tgz", - "integrity": "sha512-ZuQm81FhhvNVYtVb9GfZ+Du6e7fZlkisWvuCeBeRiyseNt1tcrQ8J3V67jD2nxje8CVXrwG3oUIbPcybv2rxfQ==", + "version": "1.4.17", + "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.4.17.tgz", + "integrity": "sha512-JBwuSTJIgiJJX6wtr4wmXbfvOswHFj223AumUrK544QV69k60FJ9q2adPW9Csk+a8wm1hLxq4HKa2K334UHJ/g==", "cpu": [ "ia32" ], @@ -2263,9 +2293,9 @@ } }, "node_modules/@swc/core-win32-x64-msvc": { - "version": "1.4.6", - "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.4.6.tgz", - "integrity": "sha512-UagPb7w5V0uzWSjrXwOavGa7s9iv3wrVdEgWy+/inm0OwY4lj3zpK9qDnMWAwYLuFwkI3UG4Q3dH8wD+CUUcjw==", + "version": "1.4.17", + "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.4.17.tgz", + "integrity": "sha512-jFkOnGQamtVDBm3MF5Kq1lgW8vx4Rm1UvJWRUfg+0gx7Uc3Jp3QMFeMNw/rDNQYRDYPG3yunCC+2463ycd5+dg==", "cpu": [ "x64" ], @@ -2285,17 +2315,20 @@ "dev": true }, "node_modules/@swc/types": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.5.tgz", - "integrity": "sha512-myfUej5naTBWnqOCc/MdVOLVjXUXtIA+NpDrDBKJtLLg2shUjBu3cZmB/85RyitKc55+lUUyl7oRfLOvkr2hsw==", - "dev": true + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.6.tgz", + "integrity": "sha512-/JLo/l2JsT/LRd80C3HfbmVpxOAJ11FO2RCEslFrgzLltoP9j8XIbsyDcfCt2WWyX+CM96rBoNM+IToAkFOugg==", + "dev": true, + "dependencies": { + "@swc/counter": "^0.1.3" + } }, "node_modules/@tanstack/react-virtual": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/@tanstack/react-virtual/-/react-virtual-3.1.3.tgz", - "integrity": 
"sha512-YCzcbF/Ws/uZ0q3Z6fagH+JVhx4JLvbSflgldMgLsuvB8aXjZLLb3HvrEVxY480F9wFlBiXlvQxOyXb5ENPrNA==", + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/@tanstack/react-virtual/-/react-virtual-3.5.0.tgz", + "integrity": "sha512-rtvo7KwuIvqK9zb0VZ5IL7fiJAEnG+0EiFZz8FUOs+2mhGqdGmjKIaT1XU7Zq0eFqL0jonLlhbayJI/J2SA/Bw==", "dependencies": { - "@tanstack/virtual-core": "3.1.3" + "@tanstack/virtual-core": "3.5.0" }, "funding": { "type": "github", @@ -2307,9 +2340,9 @@ } }, "node_modules/@tanstack/virtual-core": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/@tanstack/virtual-core/-/virtual-core-3.1.3.tgz", - "integrity": "sha512-Y5B4EYyv1j9V8LzeAoOVeTg0LI7Fo5InYKgAjkY1Pu9GjtUwX/EKxNcU7ng3sKr99WEf+bPTcktAeybyMOYo+g==", + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/@tanstack/virtual-core/-/virtual-core-3.5.0.tgz", + "integrity": "sha512-KnPRCkQTyqhanNC0K63GBG3wA8I+D1fQuVnAvcBF8f13akOKeQp1gSbu6f77zCxhEk727iV5oQnbHLYzHrECLg==", "funding": { "type": "github", "url": "https://github.com/sponsors/tannerlinsley" @@ -2343,45 +2376,38 @@ "dev": true }, "node_modules/@types/prop-types": { - "version": "15.7.11", - "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.11.tgz", - "integrity": "sha512-ga8y9v9uyeiLdpKddhxYQkxNDrfvuPrlFb0N1qnZZByvcElJaXthF1UhvCh9TLWJBEHeNtdnbysW7Y6Uq8CVng==", + "version": "15.7.12", + "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.12.tgz", + "integrity": "sha512-5zvhXYtRNRluoE/jAp4GVsSduVUzNWKkOZrCDBWYtE7biZywwdC2AcEzg+cSMLFRfVgeAFqpfNabiPjxFddV1Q==", "devOptional": true }, "node_modules/@types/ramda": { - "version": "0.29.11", - "resolved": "https://registry.npmjs.org/@types/ramda/-/ramda-0.29.11.tgz", - "integrity": "sha512-jm1+PmNOpE7aPS+mMcuB4a72VkCXUJqPSaQRu2YqR8MbsFfaowYXgKxc7bluYdDpRHNXT5Z+xu+Lgr3/ml6wSA==", + "version": "0.29.12", + "resolved": "https://registry.npmjs.org/@types/ramda/-/ramda-0.29.12.tgz", + "integrity": "sha512-sgIEjpJhdQPB52gDF4aphs9nl0xe54CR22DPdWqT8gQHjZYmVApgA0R3/CpMbl0Y8az2TEZrPNL2zy0EvjbkLA==", "dependencies": { - "types-ramda": "^0.29.9" + "types-ramda": "^0.29.10" } }, "node_modules/@types/react": { - "version": "18.2.64", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.64.tgz", - "integrity": "sha512-MlmPvHgjj2p3vZaxbQgFUQFvD8QiZwACfGqEdDSWou5yISWxDQ4/74nCAwsUiX7UFLKZz3BbVSPj+YxeoGGCfg==", + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.1.tgz", + "integrity": "sha512-V0kuGBX3+prX+DQ/7r2qsv1NsdfnCLnTgnRJ1pYnxykBhGMz+qj+box5lq7XsO5mtZsBqpjwwTu/7wszPfMBcw==", "devOptional": true, "dependencies": { "@types/prop-types": "*", - "@types/scheduler": "*", "csstype": "^3.0.2" } }, "node_modules/@types/react-dom": { - "version": "18.2.21", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.2.21.tgz", - "integrity": "sha512-gnvBA/21SA4xxqNXEwNiVcP0xSGHh/gi1VhWv9Bl46a0ItbTT5nFY+G9VSQpaG/8N/qdJpJ+vftQ4zflTtnjLw==", + "version": "18.3.0", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.0.tgz", + "integrity": "sha512-EhwApuTmMBmXuFOikhQLIBUn6uFg81SwLMOAUgodJF14SOBOCMdU04gDoYi0WOJJHD144TL32z4yDqCW3dnkQg==", "devOptional": true, "dependencies": { "@types/react": "*" } }, - "node_modules/@types/scheduler": { - "version": "0.16.8", - "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.8.tgz", - "integrity": "sha512-WZLiwShhwLRmeV6zH+GkbOFT6Z6VklCItrDioxUnv+u4Ll+8vKeFySoFyK/0ctcRpOmwAicELfmys1sDc/Rw+A==", - "devOptional": true - }, 
"node_modules/@types/semver": { "version": "7.5.8", "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.8.tgz", @@ -2416,25 +2442,25 @@ "integrity": "sha512-EwmlvuaxPNej9+T4v5AuBPJa2x2UOJVdjCtDHgcDqitUeOtjnJKJ+apYjVcAoBEMjKW1VVFGZLUb5+qqa09XFA==" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.1.1.tgz", - "integrity": "sha512-zioDz623d0RHNhvx0eesUmGfIjzrk18nSBC8xewepKXbBvN/7c1qImV7Hg8TI1URTxKax7/zxfxj3Uph8Chcuw==", + "version": "7.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.8.0.tgz", + "integrity": "sha512-gFTT+ezJmkwutUPmB0skOj3GZJtlEGnlssems4AjkVweUPGj7jRwwqg0Hhg7++kPGJqKtTYx+R05Ftww372aIg==", "dev": true, "dependencies": { - "@eslint-community/regexpp": "^4.5.1", - "@typescript-eslint/scope-manager": "7.1.1", - "@typescript-eslint/type-utils": "7.1.1", - "@typescript-eslint/utils": "7.1.1", - "@typescript-eslint/visitor-keys": "7.1.1", + "@eslint-community/regexpp": "^4.10.0", + "@typescript-eslint/scope-manager": "7.8.0", + "@typescript-eslint/type-utils": "7.8.0", + "@typescript-eslint/utils": "7.8.0", + "@typescript-eslint/visitor-keys": "7.8.0", "debug": "^4.3.4", "graphemer": "^1.4.0", - "ignore": "^5.2.4", + "ignore": "^5.3.1", "natural-compare": "^1.4.0", - "semver": "^7.5.4", - "ts-api-utils": "^1.0.1" + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" }, "engines": { - "node": "^16.0.0 || >=18.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", @@ -2451,19 +2477,19 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.1.1.tgz", - "integrity": "sha512-ZWUFyL0z04R1nAEgr9e79YtV5LbafdOtN7yapNbn1ansMyaegl2D4bL7vHoJ4HPSc4CaLwuCVas8CVuneKzplQ==", + "version": "7.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.8.0.tgz", + "integrity": "sha512-KgKQly1pv0l4ltcftP59uQZCi4HUYswCLbTqVZEJu7uLX8CTLyswqMLqLN+2QFz4jCptqWVV4SB7vdxcH2+0kQ==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "7.1.1", - "@typescript-eslint/types": "7.1.1", - "@typescript-eslint/typescript-estree": "7.1.1", - "@typescript-eslint/visitor-keys": "7.1.1", + "@typescript-eslint/scope-manager": "7.8.0", + "@typescript-eslint/types": "7.8.0", + "@typescript-eslint/typescript-estree": "7.8.0", + "@typescript-eslint/visitor-keys": "7.8.0", "debug": "^4.3.4" }, "engines": { - "node": "^16.0.0 || >=18.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", @@ -2479,16 +2505,16 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.1.1.tgz", - "integrity": "sha512-cirZpA8bJMRb4WZ+rO6+mnOJrGFDd38WoXCEI57+CYBqta8Yc8aJym2i7vyqLL1vVYljgw0X27axkUXz32T8TA==", + "version": "7.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.8.0.tgz", + "integrity": "sha512-viEmZ1LmwsGcnr85gIq+FCYI7nO90DVbE37/ll51hjv9aG+YZMb4WDE2fyWpUR4O/UrhGRpYXK/XajcGTk2B8g==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.1.1", - "@typescript-eslint/visitor-keys": "7.1.1" + "@typescript-eslint/types": "7.8.0", + "@typescript-eslint/visitor-keys": "7.8.0" }, "engines": { - "node": "^16.0.0 || >=18.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", @@ -2496,18 
+2522,18 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.1.1.tgz", - "integrity": "sha512-5r4RKze6XHEEhlZnJtR3GYeCh1IueUHdbrukV2KSlLXaTjuSfeVF8mZUVPLovidCuZfbVjfhi4c0DNSa/Rdg5g==", + "version": "7.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.8.0.tgz", + "integrity": "sha512-H70R3AefQDQpz9mGv13Uhi121FNMh+WEaRqcXTX09YEDky21km4dV1ZXJIp8QjXc4ZaVkXVdohvWDzbnbHDS+A==", "dev": true, "dependencies": { - "@typescript-eslint/typescript-estree": "7.1.1", - "@typescript-eslint/utils": "7.1.1", + "@typescript-eslint/typescript-estree": "7.8.0", + "@typescript-eslint/utils": "7.8.0", "debug": "^4.3.4", - "ts-api-utils": "^1.0.1" + "ts-api-utils": "^1.3.0" }, "engines": { - "node": "^16.0.0 || >=18.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", @@ -2523,12 +2549,12 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.1.1.tgz", - "integrity": "sha512-KhewzrlRMrgeKm1U9bh2z5aoL4s7K3tK5DwHDn8MHv0yQfWFz/0ZR6trrIHHa5CsF83j/GgHqzdbzCXJ3crx0Q==", + "version": "7.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.8.0.tgz", + "integrity": "sha512-wf0peJ+ZGlcH+2ZS23aJbOv+ztjeeP8uQ9GgwMJGVLx/Nj9CJt17GWgWWoSmoRVKAX2X+7fzEnAjxdvK2gqCLw==", "dev": true, "engines": { - "node": "^16.0.0 || >=18.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", @@ -2536,22 +2562,22 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.1.1.tgz", - "integrity": "sha512-9ZOncVSfr+sMXVxxca2OJOPagRwT0u/UHikM2Rd6L/aB+kL/QAuTnsv6MeXtjzCJYb8PzrXarypSGIPx3Jemxw==", + "version": "7.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.8.0.tgz", + "integrity": "sha512-5pfUCOwK5yjPaJQNy44prjCwtr981dO8Qo9J9PwYXZ0MosgAbfEMB008dJ5sNo3+/BN6ytBPuSvXUg9SAqB0dg==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.1.1", - "@typescript-eslint/visitor-keys": "7.1.1", + "@typescript-eslint/types": "7.8.0", + "@typescript-eslint/visitor-keys": "7.8.0", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", - "minimatch": "9.0.3", - "semver": "^7.5.4", - "ts-api-utils": "^1.0.1" + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" }, "engines": { - "node": "^16.0.0 || >=18.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", @@ -2564,21 +2590,21 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.1.1.tgz", - "integrity": "sha512-thOXM89xA03xAE0lW7alstvnyoBUbBX38YtY+zAUcpRPcq9EIhXPuJ0YTv948MbzmKh6e1AUszn5cBFK49Umqg==", + "version": "7.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.8.0.tgz", + "integrity": "sha512-L0yFqOCflVqXxiZyXrDr80lnahQfSOfc9ELAAZ75sqicqp2i36kEZZGuUymHNFoYOqxRT05up760b4iGsl02nQ==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", - "@types/json-schema": "^7.0.12", - "@types/semver": "^7.5.0", - "@typescript-eslint/scope-manager": "7.1.1", - "@typescript-eslint/types": "7.1.1", - "@typescript-eslint/typescript-estree": "7.1.1", - "semver": "^7.5.4" + "@types/json-schema": "^7.0.15", + 
"@types/semver": "^7.5.8", + "@typescript-eslint/scope-manager": "7.8.0", + "@typescript-eslint/types": "7.8.0", + "@typescript-eslint/typescript-estree": "7.8.0", + "semver": "^7.6.0" }, "engines": { - "node": "^16.0.0 || >=18.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", @@ -2589,16 +2615,16 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.1.1.tgz", - "integrity": "sha512-yTdHDQxY7cSoCcAtiBzVzxleJhkGB9NncSIyMYe2+OGON1ZsP9zOPws/Pqgopa65jvknOjlk/w7ulPlZ78PiLQ==", + "version": "7.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.8.0.tgz", + "integrity": "sha512-q4/gibTNBQNA0lGyYQCmWRS5D15n8rXh4QjK3KV+MBPlTYHpfBUT3D3PaPR/HeNiI9W6R7FvlkcGhNyAoP+caA==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.1.1", - "eslint-visitor-keys": "^3.4.1" + "@typescript-eslint/types": "7.8.0", + "eslint-visitor-keys": "^3.4.3" }, "engines": { - "node": "^16.0.0 || >=18.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", @@ -2694,9 +2720,9 @@ "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" }, "node_modules/aria-hidden": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.3.tgz", - "integrity": "sha512-xcLxITLe2HYa1cnYnwCjkOO1PqUHQpozB8x9AR0OgWN2woOBi5kSDVxKfd0b7sb1hw5qFeJhXm9H1nu3xSfLeQ==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.4.tgz", + "integrity": "sha512-y+CcFFwelSXpLZk/7fMB2mUbGtX9lKycf1MWJ7CaTIERyitVlyQx6C+sxcROU2BAJ24OiZyK+8wj2i8AlBoS3A==", "dependencies": { "tslib": "^2.0.0" }, @@ -2735,11 +2761,11 @@ } }, "node_modules/axios": { - "version": "1.6.7", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.7.tgz", - "integrity": "sha512-/hDJGff6/c7u0hDkvkGxR/oy6CbCs8ziCsC7SqmhjfozqiJGc8Z11wrv9z9lYfY4K8l+H9TpjcMDX0xOZmx+RA==", + "version": "1.6.8", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.8.tgz", + "integrity": "sha512-v/ZHtJDU39mDpyBoFVkETcd/uNdxrWRrg3bKpOKzXFA6Bvqopts6ALSMU3y6ijYxbw2B+wPrIv46egTzJXCLGQ==", "dependencies": { - "follow-redirects": "^1.15.4", + "follow-redirects": "^1.15.6", "form-data": "^4.0.0", "proxy-from-env": "^1.1.0" } @@ -2935,9 +2961,9 @@ "integrity": "sha512-br21LjYmSlVL0vFCPWPfhzUCT34FM/pAdK7rRIZwa0rrtrIdotvP4Oh4GUHsu2E3IrQMCfRkL/fN3ytMNxVQvg==" }, "node_modules/codemirror-graphql": { - "version": "2.0.10", - "resolved": "https://registry.npmjs.org/codemirror-graphql/-/codemirror-graphql-2.0.10.tgz", - "integrity": "sha512-rC9NxibCsSzWtCQjHLfwKCkyYdGv2BT/BCgyDoKPrc/e7aGiyLyeT0fB60d+0imwlvhX3lIHncl6JMz2YxQ/jg==", + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/codemirror-graphql/-/codemirror-graphql-2.0.11.tgz", + "integrity": "sha512-j1QDDXKVkpin2VsyS0ke2nAhKal6/N1UJtgnBGrPe3gj9ZSP6/K8Xytft94k0xW6giIU/JhZjvW0GwwERNzbFA==", "dependencies": { "@types/codemirror": "^0.0.90", "graphql-language-service": "5.2.0" @@ -3014,9 +3040,9 @@ } }, "node_modules/core-js-pure": { - "version": "3.36.0", - "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.36.0.tgz", - "integrity": "sha512-cN28qmhRNgbMZZMc/RFu5w8pK9VJzpb2rJVR/lHuZJKwmXnoWOpXmMkxqBB514igkp1Hu8WGROsiOAzUcKdHOQ==", + "version": "3.37.0", + "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.37.0.tgz", + "integrity": 
"sha512-d3BrpyFr5eD4KcbRvQ3FTUx/KWmaDesr7+a3+1+P46IUnNoEt+oiLijPINZMEon7w9oGkIINWxrBAU9DEciwFQ==", "hasInstallScript": true, "funding": { "type": "opencollective", @@ -3126,9 +3152,9 @@ } }, "node_modules/detect-libc": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.2.tgz", - "integrity": "sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz", + "integrity": "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==", "optional": true, "engines": { "node": ">=8" @@ -3164,9 +3190,9 @@ } }, "node_modules/dompurify": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.0.9.tgz", - "integrity": "sha512-uyb4NDIvQ3hRn6NiC+SIFaP4mJ/MdXlvtunaqK9Bn6dD3RuB/1S/gasEjDHD8eiaqdSael2vBv+hOs7Y+jhYOQ==" + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.1.0.tgz", + "integrity": "sha512-yoU4rhgPKCo+p5UrWWWNKiIq+ToGqmVVhk0PmMYBK4kRsR3/qhemNFL8f6CFmBd4gMwm3F4T7HBoydP5uY07fA==" }, "node_modules/drange": { "version": "1.1.1", @@ -3186,9 +3212,12 @@ } }, "node_modules/entities": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/entities/-/entities-2.1.0.tgz", - "integrity": "sha512-hCx1oky9PFrJ611mf0ifBLBRW8lUUVRlFolb5gWRfIELabBlbp9xZvrqZLZAs+NxFnbfQoeGd8wDkygjg7U85w==", + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "engines": { + "node": ">=0.12" + }, "funding": { "url": "https://github.com/fb55/entities?sponsor=1" } @@ -3213,9 +3242,9 @@ } }, "node_modules/esbuild": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.19.12.tgz", - "integrity": "sha512-aARqgq8roFBj054KvQr5f1sFu0D65G+miZRCuJyJ0G13Zwx7vRar5Zhn2tkQNzIXcBrNVsv/8stehpj+GAjgbg==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.20.2.tgz", + "integrity": "sha512-WdOOppmUNU+IbZ0PaDiTst80zjnrOkyJNHoKupIcVyU8Lvla3Ugx94VzkQ32Ijqd7UhHJy75gNWDMUekcrSJ6g==", "dev": true, "hasInstallScript": true, "bin": { @@ -3225,29 +3254,29 @@ "node": ">=12" }, "optionalDependencies": { - "@esbuild/aix-ppc64": "0.19.12", - "@esbuild/android-arm": "0.19.12", - "@esbuild/android-arm64": "0.19.12", - "@esbuild/android-x64": "0.19.12", - "@esbuild/darwin-arm64": "0.19.12", - "@esbuild/darwin-x64": "0.19.12", - "@esbuild/freebsd-arm64": "0.19.12", - "@esbuild/freebsd-x64": "0.19.12", - "@esbuild/linux-arm": "0.19.12", - "@esbuild/linux-arm64": "0.19.12", - "@esbuild/linux-ia32": "0.19.12", - "@esbuild/linux-loong64": "0.19.12", - "@esbuild/linux-mips64el": "0.19.12", - "@esbuild/linux-ppc64": "0.19.12", - "@esbuild/linux-riscv64": "0.19.12", - "@esbuild/linux-s390x": "0.19.12", - "@esbuild/linux-x64": "0.19.12", - "@esbuild/netbsd-x64": "0.19.12", - "@esbuild/openbsd-x64": "0.19.12", - "@esbuild/sunos-x64": "0.19.12", - "@esbuild/win32-arm64": "0.19.12", - "@esbuild/win32-ia32": "0.19.12", - "@esbuild/win32-x64": "0.19.12" + "@esbuild/aix-ppc64": "0.20.2", + "@esbuild/android-arm": "0.20.2", + "@esbuild/android-arm64": "0.20.2", + "@esbuild/android-x64": "0.20.2", + "@esbuild/darwin-arm64": "0.20.2", + "@esbuild/darwin-x64": "0.20.2", + "@esbuild/freebsd-arm64": "0.20.2", + "@esbuild/freebsd-x64": "0.20.2", + 
"@esbuild/linux-arm": "0.20.2", + "@esbuild/linux-arm64": "0.20.2", + "@esbuild/linux-ia32": "0.20.2", + "@esbuild/linux-loong64": "0.20.2", + "@esbuild/linux-mips64el": "0.20.2", + "@esbuild/linux-ppc64": "0.20.2", + "@esbuild/linux-riscv64": "0.20.2", + "@esbuild/linux-s390x": "0.20.2", + "@esbuild/linux-x64": "0.20.2", + "@esbuild/netbsd-x64": "0.20.2", + "@esbuild/openbsd-x64": "0.20.2", + "@esbuild/sunos-x64": "0.20.2", + "@esbuild/win32-arm64": "0.20.2", + "@esbuild/win32-ia32": "0.20.2", + "@esbuild/win32-x64": "0.20.2" } }, "node_modules/escape-string-regexp": { @@ -3318,9 +3347,9 @@ } }, "node_modules/eslint-plugin-react-hooks": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz", - "integrity": "sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g==", + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.2.tgz", + "integrity": "sha512-QzliNJq4GinDBcD8gPB5v0wh6g8q3SUi6EFF0x8N/BL9PoVs0atuGc47ozMRyOWAKdwaZ5OnbOEa3WR+dSGKuQ==", "dev": true, "engines": { "node": ">=10" @@ -3330,9 +3359,9 @@ } }, "node_modules/eslint-plugin-react-refresh": { - "version": "0.4.5", - "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.5.tgz", - "integrity": "sha512-D53FYKJa+fDmZMtriODxvhwrO+IOqrxoEo21gMA0sjHdU6dPVH4OhyFip9ypl8HOF5RV5KdTo+rBQLvnY2cO8w==", + "version": "0.4.6", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.6.tgz", + "integrity": "sha512-NjGXdm7zgcKRkKMua34qVO9doI7VOxZ6ancSvBELJSSoX97jyndXcSoa8XBh69JoB31dNz3EEzlMcizZl7LaMA==", "dev": true, "peerDependencies": { "eslint": ">=7" @@ -3596,9 +3625,9 @@ "dev": true }, "node_modules/follow-redirects": { - "version": "1.15.5", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.5.tgz", - "integrity": "sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw==", + "version": "1.15.6", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", + "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==", "funding": [ { "type": "individual", @@ -3851,14 +3880,14 @@ "dev": true }, "node_modules/graphiql": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/graphiql/-/graphiql-3.1.1.tgz", - "integrity": "sha512-FMNa981Wj8JBJJRTdryNyrVteigS8B7q+Q1fh1rW4IsFPaXNIs1VMs8kwqIZ8zERj4Fc64Ea750g3n6r2w9Zcg==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/graphiql/-/graphiql-3.2.0.tgz", + "integrity": "sha512-HHZ9j47IVUdUhdEdOkwD/U3kMGxCGZocEf9rk1aou5lInK9vJRbjlDW4BbG9CvA5fNoe7DevRr72tv0ubvjjPA==", "dependencies": { - "@graphiql/react": "^0.20.3", + "@graphiql/react": "^0.21.0", "@graphiql/toolkit": "^0.9.1", "graphql-language-service": "^5.2.0", - "markdown-it": "^12.2.0" + "markdown-it": "^14.1.0" }, "peerDependencies": { "graphql": "^15.5.0 || ^16.0.0", @@ -3931,9 +3960,9 @@ } }, "node_modules/hasown": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.1.tgz", - "integrity": "sha512-1/th4MHjnwncwXsIW6QMzlvYL9kG5e/CpVvLRZe4XPa8TOUNbCELqmvhDmnkNsAjwaG4+I8gJJL0JBvTTLO9qA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": 
"sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", "dependencies": { "function-bind": "^1.1.2" }, @@ -4314,11 +4343,11 @@ } }, "node_modules/linkify-it": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-3.0.3.tgz", - "integrity": "sha512-ynTsyrFSdE5oZ/O9GEf00kPngmOfVwazR5GKDq6EYfhlpFug3J2zybX56a2PRRpc9P+FuSoGNAwjlbDs9jJBPQ==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz", + "integrity": "sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==", "dependencies": { - "uc.micro": "^1.0.1" + "uc.micro": "^2.0.0" } }, "node_modules/locate-path": { @@ -4388,24 +4417,25 @@ } }, "node_modules/markdown-it": { - "version": "12.3.2", - "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-12.3.2.tgz", - "integrity": "sha512-TchMembfxfNVpHkbtriWltGWc+m3xszaRD0CZup7GFFhzIgQqxIfn3eGj1yZpfuflzPvfkt611B2Q/Bsk1YnGg==", + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.0.tgz", + "integrity": "sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==", "dependencies": { "argparse": "^2.0.1", - "entities": "~2.1.0", - "linkify-it": "^3.0.1", - "mdurl": "^1.0.1", - "uc.micro": "^1.0.5" + "entities": "^4.4.0", + "linkify-it": "^5.0.0", + "mdurl": "^2.0.0", + "punycode.js": "^2.3.1", + "uc.micro": "^2.1.0" }, "bin": { - "markdown-it": "bin/markdown-it.js" + "markdown-it": "bin/markdown-it.mjs" } }, "node_modules/mdurl": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", - "integrity": "sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz", + "integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==" }, "node_modules/merge2": { "version": "1.4.1", @@ -4487,9 +4517,9 @@ } }, "node_modules/minimatch": { - "version": "9.0.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", - "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", + "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", "dev": true, "dependencies": { "brace-expansion": "^2.0.1" @@ -4558,9 +4588,9 @@ "dev": true }, "node_modules/node-abi": { - "version": "3.56.0", - "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.56.0.tgz", - "integrity": "sha512-fZjdhDOeRcaS+rcpve7XuwHBmktS1nS1gzgghwKUQQ8nTy2FdSDr6ZT8k6YhvlJeHmmQMYiT/IH9hfco5zeW2Q==", + "version": "3.62.0", + "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.62.0.tgz", + "integrity": "sha512-CPMcGa+y33xuL1E0TcNIu4YyaZCxnnvkVaEXrsosR3FxN+fV8xvb7Mzpb7IgKler10qeMkE6+Dp8qJhpzdq35g==", "optional": true, "dependencies": { "semver": "^7.3.5" @@ -4661,17 +4691,17 @@ } }, "node_modules/optionator": { - "version": "0.9.3", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz", - "integrity": "sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==", + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": 
"sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", "dev": true, "dependencies": { - "@aashutoshrathi/word-wrap": "^1.2.3", "deep-is": "^0.1.3", "fast-levenshtein": "^2.0.6", "levn": "^0.4.1", "prelude-ls": "^1.2.1", - "type-check": "^0.4.0" + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" }, "engines": { "node": ">= 0.8.0" @@ -4855,9 +4885,9 @@ } }, "node_modules/postcss": { - "version": "8.4.35", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.35.tgz", - "integrity": "sha512-u5U8qYpBCpN13BsiEB0CbR1Hhh4Gc0zLFuedrHJKMctHCHAGrMdG0PRM/KErzAL3CU6/eckEtmHNB3x6e3c0vA==", + "version": "8.4.38", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.38.tgz", + "integrity": "sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==", "dev": true, "funding": [ { @@ -4876,7 +4906,7 @@ "dependencies": { "nanoid": "^3.3.7", "picocolors": "^1.0.0", - "source-map-js": "^1.0.2" + "source-map-js": "^1.2.0" }, "engines": { "node": "^10 || ^12 || >=14" @@ -4979,10 +5009,18 @@ "node": ">=6" } }, + "node_modules/punycode.js": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode.js/-/punycode.js-2.3.1.tgz", + "integrity": "sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==", + "engines": { + "node": ">=6" + } + }, "node_modules/qs": { - "version": "6.12.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.12.0.tgz", - "integrity": "sha512-trVZiI6RMOkO476zLGaBIzszOdFPnCCXHPG9kn0yuS1uz6xdVxPfZdB3vUig9pxPFDM9BRAgz/YUIVQ1/vuiUg==", + "version": "6.12.1", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.12.1.tgz", + "integrity": "sha512-zWmv4RSuB9r2mYQw3zxQuHWeU+42aKi1wWig/j4ele4ygELZ7PEO6MM7rim9oAQH2A5MWfsAVf/jPvTPgCbvUQ==", "dependencies": { "side-channel": "^1.0.6" }, @@ -5087,9 +5125,9 @@ } }, "node_modules/react": { - "version": "18.2.0", - "resolved": "https://registry.npmjs.org/react/-/react-18.2.0.tgz", - "integrity": "sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==", + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz", + "integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==", "dependencies": { "loose-envify": "^1.1.0" }, @@ -5122,15 +5160,15 @@ } }, "node_modules/react-dom": { - "version": "18.2.0", - "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.2.0.tgz", - "integrity": "sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==", + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz", + "integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==", "dependencies": { "loose-envify": "^1.1.0", - "scheduler": "^0.23.0" + "scheduler": "^0.23.2" }, "peerDependencies": { - "react": "^18.2.0" + "react": "^18.3.1" } }, "node_modules/react-immutable-proptypes": { @@ -5167,32 +5205,6 @@ "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" }, - "node_modules/react-redux": { - "version": "9.1.0", - "resolved": "https://registry.npmjs.org/react-redux/-/react-redux-9.1.0.tgz", - "integrity": "sha512-6qoDzIO+gbrza8h3hjMA9aq4nwVFCKFtY2iLxCtVT38Swyy2C/dJCGBXHeHLtx6qlg/8qzc2MrhOeduf5K32wQ==", - "dependencies": { - 
"@types/use-sync-external-store": "^0.0.3", - "use-sync-external-store": "^1.0.0" - }, - "peerDependencies": { - "@types/react": "^18.2.25", - "react": "^18.0", - "react-native": ">=0.69", - "redux": "^5.0.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "react-native": { - "optional": true - }, - "redux": { - "optional": true - } - } - }, "node_modules/react-remove-scroll": { "version": "2.5.5", "resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.5.5.tgz", @@ -5218,9 +5230,9 @@ } }, "node_modules/react-remove-scroll-bar": { - "version": "2.3.5", - "resolved": "https://registry.npmjs.org/react-remove-scroll-bar/-/react-remove-scroll-bar-2.3.5.tgz", - "integrity": "sha512-3cqjOqg6s0XbOjWvmasmqHch+RLxIEk2r/70rzGXuz3iIGQsQheEQyqYCBb5EECoD01Vo2SIbDqW4paLeLTASw==", + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/react-remove-scroll-bar/-/react-remove-scroll-bar-2.3.6.tgz", + "integrity": "sha512-DtSYaao4mBmX+HDo5YWYdBWQwYIQQshUV/dVxFxK+KM26Wjwp1gZ6rv6OC3oujI6Bfu6Xyg3TwK533AQutsn/g==", "dependencies": { "react-style-singleton": "^2.2.1", "tslib": "^2.0.0" @@ -5413,9 +5425,9 @@ } }, "node_modules/rollup": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.12.1.tgz", - "integrity": "sha512-ggqQKvx/PsB0FaWXhIvVkSWh7a/PCLQAsMjBc+nA2M8Rv2/HG0X6zvixAB7KyZBRtifBUhy5k8voQX/mRnABPg==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.17.2.tgz", + "integrity": "sha512-/9ClTJPByC0U4zNLowV1tMBe8yMEAxewtR3cUNX5BoEpGH3dQEWpJLr6CLp0fPdYRF/fzVOgvDb1zXuakwF5kQ==", "dev": true, "dependencies": { "@types/estree": "1.0.5" @@ -5428,19 +5440,22 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.12.1", - "@rollup/rollup-android-arm64": "4.12.1", - "@rollup/rollup-darwin-arm64": "4.12.1", - "@rollup/rollup-darwin-x64": "4.12.1", - "@rollup/rollup-linux-arm-gnueabihf": "4.12.1", - "@rollup/rollup-linux-arm64-gnu": "4.12.1", - "@rollup/rollup-linux-arm64-musl": "4.12.1", - "@rollup/rollup-linux-riscv64-gnu": "4.12.1", - "@rollup/rollup-linux-x64-gnu": "4.12.1", - "@rollup/rollup-linux-x64-musl": "4.12.1", - "@rollup/rollup-win32-arm64-msvc": "4.12.1", - "@rollup/rollup-win32-ia32-msvc": "4.12.1", - "@rollup/rollup-win32-x64-msvc": "4.12.1", + "@rollup/rollup-android-arm-eabi": "4.17.2", + "@rollup/rollup-android-arm64": "4.17.2", + "@rollup/rollup-darwin-arm64": "4.17.2", + "@rollup/rollup-darwin-x64": "4.17.2", + "@rollup/rollup-linux-arm-gnueabihf": "4.17.2", + "@rollup/rollup-linux-arm-musleabihf": "4.17.2", + "@rollup/rollup-linux-arm64-gnu": "4.17.2", + "@rollup/rollup-linux-arm64-musl": "4.17.2", + "@rollup/rollup-linux-powerpc64le-gnu": "4.17.2", + "@rollup/rollup-linux-riscv64-gnu": "4.17.2", + "@rollup/rollup-linux-s390x-gnu": "4.17.2", + "@rollup/rollup-linux-x64-gnu": "4.17.2", + "@rollup/rollup-linux-x64-musl": "4.17.2", + "@rollup/rollup-win32-arm64-msvc": "4.17.2", + "@rollup/rollup-win32-ia32-msvc": "4.17.2", + "@rollup/rollup-win32-x64-msvc": "4.17.2", "fsevents": "~2.3.2" } }, @@ -5487,9 +5502,9 @@ ] }, "node_modules/scheduler": { - "version": "0.23.0", - "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.0.tgz", - "integrity": "sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw==", + "version": "0.23.2", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz", + "integrity": 
"sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==", "dependencies": { "loose-envify": "^1.1.0" } @@ -5523,16 +5538,16 @@ } }, "node_modules/set-function-length": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.1.tgz", - "integrity": "sha512-j4t6ccc+VsKwYHso+kElc5neZpjtq9EnRICFZtWyBsLojhmeF/ZBd/elqm22WJh/BziDe/SBiOeAt0m2mfLD0g==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", "dependencies": { - "define-data-property": "^1.1.2", + "define-data-property": "^1.1.4", "es-errors": "^1.3.0", "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.3", + "get-intrinsic": "^1.2.4", "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.1" + "has-property-descriptors": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -5587,9 +5602,9 @@ } }, "node_modules/short-unique-id": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/short-unique-id/-/short-unique-id-5.0.3.tgz", - "integrity": "sha512-yhniEILouC0s4lpH0h7rJsfylZdca10W9mDJRAFh3EpcSUanCHGb0R7kcFOIUCZYSAPo0PUD5ZxWQdW0T4xaug==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/short-unique-id/-/short-unique-id-5.1.1.tgz", + "integrity": "sha512-qqisAdcWLXSTNK2MKXI66ldHpTKWv+5c28TPG//8Tv9mwC2UL/J/w2EsJaPzVxVRTmoBc4KwGIuZiz58wButfA==", "bin": { "short-unique-id": "bin/short-unique-id", "suid": "bin/short-unique-id" @@ -5667,9 +5682,9 @@ } }, "node_modules/source-map-js": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", - "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz", + "integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==", "dev": true, "engines": { "node": ">=0.10.0" @@ -5754,16 +5769,16 @@ } }, "node_modules/swagger-client": { - "version": "3.26.0", - "resolved": "https://registry.npmjs.org/swagger-client/-/swagger-client-3.26.0.tgz", - "integrity": "sha512-1yFR/S2V3v5DwgmNePoHEjq2dZJxDx1leDQ53r5M4hZs+dozm9VnznlSl9a1V5iTYw4UsS4PQuBRQsmBH21ViA==", + "version": "3.27.2", + "resolved": "https://registry.npmjs.org/swagger-client/-/swagger-client-3.27.2.tgz", + "integrity": "sha512-7dVtvyCXmpHXmv5xgS5DyAyxN17l75qmxN8BCNb/z3sj+kYDsxwJeJP3X6enPyxtZsMZFDMxC+EtiFbml7pS6Q==", "dependencies": { "@babel/runtime-corejs3": "^7.22.15", - "@swagger-api/apidom-core": ">=0.97.0 <1.0.0", - "@swagger-api/apidom-error": ">=0.97.0 <1.0.0", - "@swagger-api/apidom-json-pointer": ">=0.97.0 <1.0.0", - "@swagger-api/apidom-ns-openapi-3-1": ">=0.97.0 <1.0.0", - "@swagger-api/apidom-reference": ">=0.97.0 <1.0.0", + "@swagger-api/apidom-core": ">=0.99.1 <1.0.0", + "@swagger-api/apidom-error": ">=0.99.0 <1.0.0", + "@swagger-api/apidom-json-pointer": ">=0.99.1 <1.0.0", + "@swagger-api/apidom-ns-openapi-3-1": ">=0.99.1 <1.0.0", + "@swagger-api/apidom-reference": ">=0.99.1 <1.0.0", "cookie": "~0.6.0", "deepmerge": "~4.3.0", "fast-json-patch": "^3.0.0-1", @@ -5772,7 +5787,7 @@ "node-abort-controller": "^3.1.1", "node-fetch-commonjs": "^3.3.2", "qs": "^6.10.2", - "traverse": "~0.6.6" + "traverse": "=0.6.8" } }, "node_modules/swagger-client/node_modules/is-plain-object": { @@ -5784,17 +5799,17 
@@ } }, "node_modules/swagger-ui-react": { - "version": "5.11.10", - "resolved": "https://registry.npmjs.org/swagger-ui-react/-/swagger-ui-react-5.11.10.tgz", - "integrity": "sha512-X5HwC5h/HN5txkjOmSfL2nuhQH3fkePSdH8rrvqKFKwzZpvUYw0CmwBpBkJyQm24FuI7U9q/k3/ru6dVG32cQw==", + "version": "5.17.2", + "resolved": "https://registry.npmjs.org/swagger-ui-react/-/swagger-ui-react-5.17.2.tgz", + "integrity": "sha512-jwhKQ0IdM1t77clbJ9EorL7+6B5Sr1mG+ryqSELxT5MaG4y3yOIyFbZ0Xn/EnSyRuww/V8FTK/0KIX3gf41taw==", "dependencies": { - "@babel/runtime-corejs3": "^7.24.0", - "@braintree/sanitize-url": "=7.0.0", + "@babel/runtime-corejs3": "^7.24.4", + "@braintree/sanitize-url": "=7.0.1", "base64-js": "^1.5.1", "classnames": "^2.5.1", "css.escape": "1.5.1", "deep-extend": "0.6.0", - "dompurify": "=3.0.9", + "dompurify": "=3.1.0", "ieee754": "^1.2.1", "immutable": "^3.x.x", "js-file-download": "^0.4.12", @@ -5809,7 +5824,7 @@ "react-immutable-proptypes": "2.2.0", "react-immutable-pure-component": "^2.2.0", "react-inspector": "^6.0.1", - "react-redux": "^9.1.0", + "react-redux": "^9.1.1", "react-syntax-highlighter": "^15.5.0", "redux": "^5.0.1", "redux-immutable": "^4.0.0", @@ -5817,7 +5832,7 @@ "reselect": "^5.1.0", "serialize-error": "^8.1.0", "sha.js": "^2.4.11", - "swagger-client": "^3.25.4", + "swagger-client": "^3.27.2", "url-parse": "^1.5.10", "xml": "=1.0.1", "xml-but-prettier": "^1.0.1", @@ -5828,6 +5843,32 @@ "react-dom": ">=16.8.0 <19" } }, + "node_modules/swagger-ui-react/node_modules/react-redux": { + "version": "9.1.1", + "resolved": "https://registry.npmjs.org/react-redux/-/react-redux-9.1.1.tgz", + "integrity": "sha512-5ynfGDzxxsoV73+4czQM56qF43vsmgJsO22rmAvU5tZT2z5Xow/A2uhhxwXuGTxgdReF3zcp7A80gma2onRs1A==", + "dependencies": { + "@types/use-sync-external-store": "^0.0.3", + "use-sync-external-store": "^1.0.0" + }, + "peerDependencies": { + "@types/react": "^18.2.25", + "react": "^18.0", + "react-native": ">=0.69", + "redux": "^5.0.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "react-native": { + "optional": true + }, + "redux": { + "optional": true + } + } + }, "node_modules/tar-fs": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.1.tgz", @@ -5932,9 +5973,9 @@ } }, "node_modules/ts-api-utils": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.2.1.tgz", - "integrity": "sha512-RIYA36cJn2WiH9Hy77hdF9r7oEwxAtB/TS9/S4Qd90Ap4z5FSiin5zEiTL44OII1Y3IIlEvxwxFUVgrHSZ/UpA==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.3.0.tgz", + "integrity": "sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ==", "dev": true, "engines": { "node": ">=16" @@ -5994,17 +6035,17 @@ } }, "node_modules/types-ramda": { - "version": "0.29.9", - "resolved": "https://registry.npmjs.org/types-ramda/-/types-ramda-0.29.9.tgz", - "integrity": "sha512-B+VbLtW68J4ncG/rccKaYDhlirKlVH/Izh2JZUfaPJv+3Tl2jbbgYsB1pvole1vXKSgaPlAe/wgEdOnMdAu52A==", + "version": "0.29.10", + "resolved": "https://registry.npmjs.org/types-ramda/-/types-ramda-0.29.10.tgz", + "integrity": "sha512-5PJiW/eiTPyXXBYGZOYGezMl6qj7keBiZheRwfjJZY26QPHsNrjfJnz0mru6oeqqoTHOni893Jfd6zyUXfQRWg==", "dependencies": { "ts-toolbelt": "^9.6.0" } }, "node_modules/typescript": { - "version": "5.4.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.2.tgz", - "integrity": 
"sha512-+2/g0Fds1ERlP6JsakQQDXjZdZMM+rqpamFZJEKh4kwTIn3iDkgKtby0CeNd5ATNZ4Ry1ax15TMx0W2V+miizQ==", + "version": "5.4.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz", + "integrity": "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==", "dev": true, "bin": { "tsc": "bin/tsc", @@ -6015,9 +6056,9 @@ } }, "node_modules/uc.micro": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz", - "integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz", + "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==" }, "node_modules/universalify": { "version": "2.0.1", @@ -6051,9 +6092,9 @@ } }, "node_modules/use-callback-ref": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/use-callback-ref/-/use-callback-ref-1.3.1.tgz", - "integrity": "sha512-Lg4Vx1XZQauB42Hw3kK7JM6yjVjgFmFC5/Ab797s79aARomD2nEErc4mCgM8EZrARLmmbWpi5DGCadmK50DcAQ==", + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/use-callback-ref/-/use-callback-ref-1.3.2.tgz", + "integrity": "sha512-elOQwe6Q8gqZgDA8mrh44qRTQqpIHDcZ3hXTLjBe1i4ph8XpNJnO+aQf3NaG+lriLopI4HMx9VjQLfPQ6vhnoA==", "dependencies": { "tslib": "^2.0.0" }, @@ -6092,9 +6133,9 @@ } }, "node_modules/use-sync-external-store": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.2.0.tgz", - "integrity": "sha512-eEgnFxGQ1Ife9bzYs6VLi8/4X6CObHMw9Qr9tPY43iKwsPw8xE8+EFsf/2cFZ5S3esXgpWgtSCtLNS41F+sKPA==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.2.2.tgz", + "integrity": "sha512-PElTlVMwpblvbNqQ82d2n6RjStvdSoNe9FG28kNfz3WiXilJm4DdNkEzRhCZuIDwY8U08WVihhGR5iRqAwfDiw==", "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0" } @@ -6106,14 +6147,14 @@ "optional": true }, "node_modules/vite": { - "version": "5.1.5", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.1.5.tgz", - "integrity": "sha512-BdN1xh0Of/oQafhU+FvopafUp6WaYenLU/NFoL5WyJL++GxkNfieKzBhM24H3HVsPQrlAqB7iJYTHabzaRed5Q==", + "version": "5.2.10", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.2.10.tgz", + "integrity": "sha512-PAzgUZbP7msvQvqdSD+ErD5qGnSFiGOoWmV5yAKUEI0kdhjbH6nMWVyZQC/hSc4aXwc0oJ9aEdIiF9Oje0JFCw==", "dev": true, "dependencies": { - "esbuild": "^0.19.3", - "postcss": "^8.4.35", - "rollup": "^4.2.0" + "esbuild": "^0.20.1", + "postcss": "^8.4.38", + "rollup": "^4.13.0" }, "bin": { "vite": "bin/vite.js" @@ -6199,6 +6240,15 @@ "node": ">= 8" } }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", @@ -6231,9 +6281,9 @@ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "node_modules/yaml": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.4.1.tgz", - "integrity": "sha512-pIXzoImaqmfOrL7teGUBt/T7ZDnyeGBWyXQBvOVhLkWLN37GXv8NMLK406UY6dS51JfcQHsmcW5cJ441bHg6Lg==", + "version": 
"2.4.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.4.2.tgz", + "integrity": "sha512-B3VqDZ+JAg1nZpaEmWtTXUlBneoGx6CPM9b0TENK6aoSu5t73dItudwdgmi6tHlIZZId4dZ9skcAQ2UbcyAeVA==", "bin": { "yaml": "bin.mjs" }, diff --git a/playground/package.json b/playground/package.json index 0930fe41ea..845369d2c6 100644 --- a/playground/package.json +++ b/playground/package.json @@ -10,23 +10,23 @@ "preview": "vite preview" }, "dependencies": { - "graphiql": "^3.1.1", + "graphiql": "^3.2.0", "graphql": "^16.8.1", - "react": "^18.2.0", - "react-dom": "^18.2.0", - "swagger-ui-react": "^5.11.9" + "react": "^18.3.1", + "react-dom": "^18.3.1", + "swagger-ui-react": "^5.17.2" }, "devDependencies": { - "@types/react": "^18.2.61", - "@types/react-dom": "^18.2.18", + "@types/react": "^18.3.1", + "@types/react-dom": "^18.3.0", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^7.1.0", - "@typescript-eslint/parser": "^7.1.0", + "@typescript-eslint/eslint-plugin": "^7.7.1", + "@typescript-eslint/parser": "^7.7.1", "@vitejs/plugin-react-swc": "^3.6.0", "eslint": "^8.57.0", - "eslint-plugin-react-hooks": "^4.6.0", - "eslint-plugin-react-refresh": "^0.4.5", - "typescript": "^5.3.3", - "vite": "^5.1.4" + "eslint-plugin-react-hooks": "^4.6.2", + "eslint-plugin-react-refresh": "^0.4.6", + "typescript": "^5.4.5", + "vite": "^5.2.10" } } diff --git a/request/graphql/parser/commit.go b/request/graphql/parser/commit.go index e4d4c01903..a6b468fc35 100644 --- a/request/graphql/parser/commit.go +++ b/request/graphql/parser/commit.go @@ -36,7 +36,7 @@ func parseCommitSelect(schema gql.Schema, parent *gql.Object, field *ast.Field) commit.DocID = immutable.Some(raw.Value) } else if prop == request.Cid { raw := argument.Value.(*ast.StringValue) - commit.Cid = immutable.Some(raw.Value) + commit.CID = immutable.Some(raw.Value) } else if prop == request.FieldIDName { raw := argument.Value.(*ast.StringValue) commit.FieldID = immutable.Some(raw.Value) @@ -112,7 +112,7 @@ func parseCommitSelect(schema gql.Schema, parent *gql.Object, field *ast.Field) return nil, err } - commit.Fields, err = parseSelectFields(schema, request.CommitSelection, fieldObject, field.SelectionSet) + commit.Fields, err = parseSelectFields(schema, fieldObject, field.SelectionSet) return commit, err } diff --git a/request/graphql/parser/mutation.go b/request/graphql/parser/mutation.go index 27becabb71..92071b6e93 100644 --- a/request/graphql/parser/mutation.go +++ b/request/graphql/parser/mutation.go @@ -116,7 +116,7 @@ func parseMutation(schema gql.Schema, parent *gql.Object, field *ast.Field) (*re mut.Filter = filter } else if prop == request.DocIDArgName { raw := argument.Value.(*ast.StringValue) - mut.IDs = immutable.Some([]string{raw.Value}) + mut.DocIDs = immutable.Some([]string{raw.Value}) } else if prop == request.DocIDsArgName { raw := argument.Value.(*ast.ListValue) ids := make([]string, len(raw.Values)) @@ -127,7 +127,7 @@ func parseMutation(schema gql.Schema, parent *gql.Object, field *ast.Field) (*re } ids[i] = id.Value } - mut.IDs = immutable.Some(ids) + mut.DocIDs = immutable.Some(ids) } } @@ -141,7 +141,7 @@ func parseMutation(schema gql.Schema, parent *gql.Object, field *ast.Field) (*re return nil, err } - mut.Fields, err = parseSelectFields(schema, request.ObjectSelection, fieldObject, field.SelectionSet) + mut.Fields, err = parseSelectFields(schema, fieldObject, field.SelectionSet) return mut, err } diff --git a/request/graphql/parser/query.go b/request/graphql/parser/query.go index 3213c7489a..48fde3db1f 
100644 --- a/request/graphql/parser/query.go +++ b/request/graphql/parser/query.go @@ -55,14 +55,16 @@ func parseQueryOperationDefinition( Name: parsed.Name, Alias: parsed.Alias, }, - Fields: []request.Selection{ - parsed, + ChildSelect: request.ChildSelect{ + Fields: []request.Selection{ + parsed, + }, }, } } else { // the query doesn't match a reserve name // so its probably a generated query - parsed, err := parseSelect(schema, request.ObjectSelection, schema.QueryType(), node, i) + parsed, err := parseSelect(schema, schema.QueryType(), node, i) if err != nil { return nil, []error{err} } @@ -90,7 +92,6 @@ func parseQueryOperationDefinition( // filters, limits, orders, etc.. func parseSelect( schema gql.Schema, - rootType request.SelectionType, parent *gql.Object, field *ast.Field, index int, @@ -100,7 +101,6 @@ func parseSelect( Name: field.Name.Value, Alias: getFieldAlias(field), }, - Root: rootType, } fieldDef := gql.GetFieldDef(schema, parent, slct.Name) @@ -191,7 +191,7 @@ func parseSelect( return nil, err } - slct.Fields, err = parseSelectFields(schema, slct.Root, fieldObject, field.SelectionSet) + slct.Fields, err = parseSelectFields(schema, fieldObject, field.SelectionSet) if err != nil { return nil, err } @@ -306,10 +306,18 @@ func parseAggregate(schema gql.Schema, parent *gql.Object, field *ast.Field, ind targets[i] = &request.AggregateTarget{ HostName: hostName, ChildName: immutable.Some(childName), - Filter: filter, - Limit: limit, - Offset: offset, - OrderBy: order, + Filterable: request.Filterable{ + Filter: filter, + }, + Limitable: request.Limitable{ + Limit: limit, + }, + Offsetable: request.Offsetable{ + Offset: offset, + }, + Orderable: request.Orderable{ + OrderBy: order, + }, } } } diff --git a/request/graphql/parser/request.go b/request/graphql/parser/request.go index 69d275de03..f0a73a4667 100644 --- a/request/graphql/parser/request.go +++ b/request/graphql/parser/request.go @@ -162,7 +162,6 @@ func getFieldAlias(field *ast.Field) immutable.Option[string] { func parseSelectFields( schema gql.Schema, - root request.SelectionType, parent *gql.Object, fields *ast.SelectionSet) ([]request.Selection, error) { selections := make([]request.Selection, len(fields.Selections)) @@ -179,13 +178,7 @@ func parseSelectFields( } else if node.SelectionSet == nil { // regular field selections[i] = parseField(node) } else { // sub type with extra fields - subroot := root - switch node.Name.Value { - case request.VersionFieldName: - subroot = request.CommitSelection - } - - s, err := parseSelect(schema, subroot, parent, node, i) + s, err := parseSelect(schema, parent, node, i) if err != nil { return nil, err } diff --git a/request/graphql/parser/subscription.go b/request/graphql/parser/subscription.go index 354645beb4..0e6042f931 100644 --- a/request/graphql/parser/subscription.go +++ b/request/graphql/parser/subscription.go @@ -79,6 +79,6 @@ func parseSubscription(schema gql.Schema, field *ast.Field) (*request.ObjectSubs return nil, err } - sub.Fields, err = parseSelectFields(schema, request.ObjectSelection, fieldObject, field.SelectionSet) + sub.Fields, err = parseSelectFields(schema, fieldObject, field.SelectionSet) return sub, err } diff --git a/request/graphql/schema/collection.go b/request/graphql/schema/collection.go index d9ebefa680..937a6e2973 100644 --- a/request/graphql/schema/collection.go +++ b/request/graphql/schema/collection.go @@ -14,6 +14,7 @@ import ( "context" "fmt" "sort" + "strings" "github.com/sourcenetwork/graphql-go/language/ast" gqlp 
"github.com/sourcenetwork/graphql-go/language/parser" @@ -51,13 +52,13 @@ func fromAst(doc *ast.Document) ( []client.CollectionDefinition, error, ) { - relationManager := NewRelationManager() definitions := []client.CollectionDefinition{} + cTypeByFieldNameByObjName := map[string]map[string]client.CType{} for _, def := range doc.Definitions { switch defType := def.(type) { case *ast.ObjectDefinition: - description, err := collectionFromAstDefinition(relationManager, defType) + description, err := collectionFromAstDefinition(defType, cTypeByFieldNameByObjName) if err != nil { return nil, err } @@ -65,7 +66,7 @@ func fromAst(doc *ast.Document) ( definitions = append(definitions, description) case *ast.InterfaceDefinition: - description, err := schemaFromAstDefinition(relationManager, defType) + description, err := schemaFromAstDefinition(defType, cTypeByFieldNameByObjName) if err != nil { return nil, err } @@ -87,7 +88,7 @@ func fromAst(doc *ast.Document) ( // The details on the relations between objects depend on both sides // of the relationship. The relation manager handles this, and must be applied // after all the collections have been processed. - err := finalizeRelations(relationManager, definitions) + err := finalizeRelations(definitions, cTypeByFieldNameByObjName) if err != nil { return nil, err } @@ -97,25 +98,38 @@ func fromAst(doc *ast.Document) ( // collectionFromAstDefinition parses a AST object definition into a set of collection descriptions. func collectionFromAstDefinition( - relationManager *RelationManager, def *ast.ObjectDefinition, + cTypeByFieldNameByObjName map[string]map[string]client.CType, ) (client.CollectionDefinition, error) { - fieldDescriptions := []client.SchemaFieldDescription{ + schemaFieldDescriptions := []client.SchemaFieldDescription{ { Name: request.DocIDFieldName, Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, } + collectionFieldDescriptions := []client.CollectionFieldDescription{ + { + Name: request.DocIDFieldName, + }, + } + + policyDescription := immutable.None[client.PolicyDescription]() indexDescriptions := []client.IndexDescription{} for _, field := range def.Fields { - tmpFieldsDescriptions, err := fieldsFromAST(field, relationManager, def.Name.Value) + tmpSchemaFieldDescriptions, tmpCollectionFieldDescriptions, err := fieldsFromAST( + field, + def.Name.Value, + cTypeByFieldNameByObjName, + false, + ) if err != nil { return client.CollectionDefinition{}, err } - fieldDescriptions = append(fieldDescriptions, tmpFieldsDescriptions...) + schemaFieldDescriptions = append(schemaFieldDescriptions, tmpSchemaFieldDescriptions...) + collectionFieldDescriptions = append(collectionFieldDescriptions, tmpCollectionFieldDescriptions...) 
for _, directive := range field.Directives { if directive.Name.Value == types.IndexDirectiveLabel { @@ -129,14 +143,23 @@ func collectionFromAstDefinition( } // sort the fields lexicographically - sort.Slice(fieldDescriptions, func(i, j int) bool { + sort.Slice(schemaFieldDescriptions, func(i, j int) bool { // make sure that the _docID is always at the beginning - if fieldDescriptions[i].Name == request.DocIDFieldName { + if schemaFieldDescriptions[i].Name == request.DocIDFieldName { return true - } else if fieldDescriptions[j].Name == request.DocIDFieldName { + } else if schemaFieldDescriptions[j].Name == request.DocIDFieldName { return false } - return fieldDescriptions[i].Name < fieldDescriptions[j].Name + return schemaFieldDescriptions[i].Name < schemaFieldDescriptions[j].Name + }) + sort.Slice(collectionFieldDescriptions, func(i, j int) bool { + // make sure that the _docID is always at the beginning + if collectionFieldDescriptions[i].Name == request.DocIDFieldName { + return true + } else if collectionFieldDescriptions[j].Name == request.DocIDFieldName { + return false + } + return collectionFieldDescriptions[i].Name < collectionFieldDescriptions[j].Name }) for _, directive := range def.Directives { @@ -147,28 +170,38 @@ func collectionFromAstDefinition( } indexDescriptions = append(indexDescriptions, index) } + if directive.Name.Value == types.PolicySchemaDirectiveLabel { + policy, err := policyFromAST(directive) + if err != nil { + return client.CollectionDefinition{}, err + } + policyDescription = immutable.Some(policy) + } } return client.CollectionDefinition{ Description: client.CollectionDescription{ Name: immutable.Some(def.Name.Value), Indexes: indexDescriptions, + Policy: policyDescription, + Fields: collectionFieldDescriptions, }, Schema: client.SchemaDescription{ Name: def.Name.Value, - Fields: fieldDescriptions, + Fields: schemaFieldDescriptions, }, }, nil } func schemaFromAstDefinition( - relationManager *RelationManager, def *ast.InterfaceDefinition, + cTypeByFieldNameByObjName map[string]map[string]client.CType, ) (client.SchemaDescription, error) { fieldDescriptions := []client.SchemaFieldDescription{} for _, field := range def.Fields { - tmpFieldsDescriptions, err := fieldsFromAST(field, relationManager, def.Name.Value) + // schema-only types do not have collection fields, so we can safely discard any returned here + tmpFieldsDescriptions, _, err := fieldsFromAST(field, def.Name.Value, cTypeByFieldNameByObjName, true) if err != nil { return client.SchemaDescription{}, err } @@ -312,76 +345,135 @@ func indexFromAST(directive *ast.Directive) (client.IndexDescription, error) { return desc, nil } -func fieldsFromAST(field *ast.FieldDefinition, - relationManager *RelationManager, +func fieldsFromAST( + field *ast.FieldDefinition, hostObjectName string, -) ([]client.SchemaFieldDescription, error) { + cTypeByFieldNameByObjName map[string]map[string]client.CType, + schemaOnly bool, +) ([]client.SchemaFieldDescription, []client.CollectionFieldDescription, error) { kind, err := astTypeToKind(field.Type) if err != nil { - return nil, err + return nil, nil, err } - schema := "" - relationName := "" - relationType := relationType(0) + cType, err := setCRDTType(field, kind) + if err != nil { + return nil, nil, err + } - fieldDescriptions := []client.SchemaFieldDescription{} + hostMap := cTypeByFieldNameByObjName[hostObjectName] + if hostMap == nil { + hostMap = map[string]client.CType{} + cTypeByFieldNameByObjName[hostObjectName] = hostMap + } + hostMap[field.Name.Value] = cType 
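+	// This entry is read back by finalizeRelations, which needs the field's CRDT
+	// type when it later adds the primary side of a relation to the schema.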
- if kind == client.FieldKind_FOREIGN_OBJECT || kind == client.FieldKind_FOREIGN_OBJECT_ARRAY { - if kind == client.FieldKind_FOREIGN_OBJECT { - schema = field.Type.(*ast.Named).Name.Value - relationType = relation_Type_ONE - if _, exists := findDirective(field, "primary"); exists { - relationType |= relation_Type_Primary - } - } else if kind == client.FieldKind_FOREIGN_OBJECT_ARRAY { - schema = field.Type.(*ast.List).Type.(*ast.Named).Name.Value - relationType = relation_Type_MANY - } + schemaFieldDescriptions := []client.SchemaFieldDescription{} + collectionFieldDescriptions := []client.CollectionFieldDescription{} - relationName, err = getRelationshipName(field, hostObjectName, schema) + if kind.IsObject() { + relationName, err := getRelationshipName(field, hostObjectName, kind.Underlying()) if err != nil { - return nil, err + return nil, nil, err } - if kind == client.FieldKind_FOREIGN_OBJECT { - // An _id field is added for every 1-N relationship from this object. - fieldDescriptions = append(fieldDescriptions, client.SchemaFieldDescription{ - Name: fmt.Sprintf("%s_id", field.Name.Value), - Kind: client.FieldKind_DocID, - Typ: defaultCRDTForFieldKind[client.FieldKind_DocID], - RelationName: relationName, - }) + if kind.IsArray() { + if schemaOnly { // todo - document and/or do better + schemaFieldDescriptions = append( + schemaFieldDescriptions, + client.SchemaFieldDescription{ + Name: field.Name.Value, + Kind: kind, + }, + ) + } else { + collectionFieldDescriptions = append( + collectionFieldDescriptions, + client.CollectionFieldDescription{ + Name: field.Name.Value, + Kind: immutable.Some(kind), + RelationName: immutable.Some(relationName), + }, + ) + } + } else { + idFieldName := fmt.Sprintf("%s_id", field.Name.Value) + + collectionFieldDescriptions = append( + collectionFieldDescriptions, + client.CollectionFieldDescription{ + Name: idFieldName, + Kind: immutable.Some[client.FieldKind](client.FieldKind_DocID), + RelationName: immutable.Some(relationName), + }, + ) + + collectionFieldDescriptions = append( + collectionFieldDescriptions, + client.CollectionFieldDescription{ + Name: field.Name.Value, + Kind: immutable.Some(kind), + RelationName: immutable.Some(relationName), + }, + ) + + if _, exists := findDirective(field, "primary"); exists { + // Only primary fields exist on the schema. If primary is automatically set + // (e.g. for one-many) a later step will add this property. + schemaFieldDescriptions = append( + schemaFieldDescriptions, + client.SchemaFieldDescription{ + Name: field.Name.Value, + Kind: kind, + Typ: cType, + }, + ) + } } + } else { + schemaFieldDescriptions = append( + schemaFieldDescriptions, + client.SchemaFieldDescription{ + Name: field.Name.Value, + Kind: kind, + Typ: cType, + }, + ) - // Register the relationship so that the relationship manager can evaluate - // relationsip properties dependent on both collections in the relationship. 
- _, err := relationManager.RegisterSingle( - relationName, - schema, - field.Name.Value, - relationType, + collectionFieldDescriptions = append( + collectionFieldDescriptions, + client.CollectionFieldDescription{ + Name: field.Name.Value, + }, ) - if err != nil { - return nil, err - } } - cType, err := setCRDTType(field, kind) - if err != nil { - return nil, err - } + return schemaFieldDescriptions, collectionFieldDescriptions, nil +} - fieldDescription := client.SchemaFieldDescription{ - Name: field.Name.Value, - Kind: kind, - Typ: cType, - Schema: schema, - RelationName: relationName, +// policyFromAST returns the policy description after parsing but the validation +// is not done yet on the values that are returned. This is because we need acp to do that. +func policyFromAST(directive *ast.Directive) (client.PolicyDescription, error) { + policyDesc := client.PolicyDescription{} + for _, arg := range directive.Arguments { + switch arg.Name.Value { + case types.PolicySchemaDirectivePropID: + policyIDProp, ok := arg.Value.(*ast.StringValue) + if !ok { + return client.PolicyDescription{}, ErrPolicyInvalidIDProp + } + policyDesc.ID = policyIDProp.Value + case types.PolicySchemaDirectivePropResource: + policyResourceProp, ok := arg.Value.(*ast.StringValue) + if !ok { + return client.PolicyDescription{}, ErrPolicyInvalidResourceProp + } + policyDesc.ResourceName = policyResourceProp.Value + default: + return client.PolicyDescription{}, ErrPolicyWithUnknownArg + } } - - fieldDescriptions = append(fieldDescriptions, fieldDescription) - return fieldDescriptions, nil + return policyDesc, nil } func setCRDTType(field *ast.FieldDefinition, kind client.FieldKind) (client.CType, error) { @@ -401,6 +493,15 @@ func setCRDTType(field *ast.FieldDefinition, kind client.FieldKind) (client.CTyp } } } + + if kind.IsObjectArray() { + return client.NONE_CRDT, nil + } + + if kind.IsObject() { + return client.LWW_REGISTER, nil + } + return defaultCRDTForFieldKind[kind], nil } @@ -430,7 +531,7 @@ func astTypeToKind(t ast.Type) (client.FieldKind, error) { case typeString: return client.FieldKind_STRING_ARRAY, nil default: - return 0, NewErrNonNullForTypeNotSupported(innerAstTypeVal.Type.(*ast.Named).Name.Value) + return client.FieldKind_None, NewErrNonNullForTypeNotSupported(innerAstTypeVal.Type.(*ast.Named).Name.Value) } default: @@ -444,7 +545,7 @@ func astTypeToKind(t ast.Type) (client.FieldKind, error) { case typeString: return client.FieldKind_NILLABLE_STRING_ARRAY, nil default: - return client.FieldKind_FOREIGN_OBJECT_ARRAY, nil + return client.ObjectArrayKind(astTypeVal.Type.(*ast.Named).Name.Value), nil } } @@ -467,14 +568,14 @@ func astTypeToKind(t ast.Type) (client.FieldKind, error) { case typeJSON: return client.FieldKind_NILLABLE_JSON, nil default: - return client.FieldKind_FOREIGN_OBJECT, nil + return client.ObjectKind(astTypeVal.Name.Value), nil } case *ast.NonNull: - return 0, ErrNonNullNotSupported + return client.FieldKind_None, ErrNonNullNotSupported default: - return 0, NewErrTypeNotFound(t.String()) + return client.FieldKind_None, NewErrTypeNotFound(t.String()) } } @@ -513,7 +614,23 @@ func getRelationshipName( return genRelationName(hostName, targetName) } -func finalizeRelations(relationManager *RelationManager, definitions []client.CollectionDefinition) error { +func genRelationName(t1, t2 string) (string, error) { + if t1 == "" || t2 == "" { + return "", client.NewErrUninitializeProperty("genRelationName", "relation types") + } + t1 = strings.ToLower(t1) + t2 = strings.ToLower(t2) + + if i 
:= strings.Compare(t1, t2); i < 0 { + return fmt.Sprintf("%s_%s", t1, t2), nil + } + return fmt.Sprintf("%s_%s", t2, t1), nil +} + +func finalizeRelations( + definitions []client.CollectionDefinition, + cTypeByFieldNameByObjName map[string]map[string]client.CType, +) error { embeddedObjNames := map[string]struct{}{} for _, def := range definitions { if !def.Description.Name.HasValue() { @@ -521,35 +638,91 @@ func finalizeRelations(relationManager *RelationManager, definitions []client.Co } } - for _, definition := range definitions { - for i, field := range definition.Schema.Fields { - if field.RelationName == "" || field.Kind == client.FieldKind_DocID { + for i, definition := range definitions { + if _, ok := embeddedObjNames[definition.Description.Name.Value()]; ok { + // Embedded objects are simpler and require no additional work + continue + } + + for _, field := range definition.Description.Fields { + if !field.Kind.HasValue() || !field.Kind.Value().IsObject() || field.Kind.Value().IsArray() { + // We only need to process the primary side of a relation here: if the field is not a relation, + // or if it is an array, we can skip it. continue } - rel, err := relationManager.GetRelation(field.RelationName) - if err != nil { - return err + var otherColDefinition immutable.Option[client.CollectionDefinition] + for _, otherDef := range definitions { + // Check the 'other' schema name; there can only be a one-one mapping in an SDL + // apart from embedded, which will be schema-only. + if otherDef.Schema.Name == field.Kind.Value().Underlying() { + otherColDefinition = immutable.Some(otherDef) + break + } } - _, fieldRelationType, ok := rel.getField(field.Schema, field.Name) - if !ok { - return NewErrRelationMissingField(field.Schema, field.Name) + if !otherColDefinition.HasValue() { + // If the other collection is not found here we skip this field. Whilst this almost certainly means the SDL + // is invalid, validating anything beyond SDL syntax is not the responsibility of this package. + continue + } + + var otherColFieldDescription immutable.Option[client.CollectionFieldDescription] + for _, otherField := range otherColDefinition.Value().Description.Fields { + if otherField.RelationName.Value() == field.RelationName.Value() { + otherColFieldDescription = immutable.Some(otherField) + break + } } - // if not finalized then we are missing one side of the relationship - // unless this is an embedded object, which only have single-sided relations - _, shouldBeOneSidedRelation := embeddedObjNames[field.Schema] - if shouldBeOneSidedRelation && rel.finalized { - return NewErrViewRelationMustBeOneSided(field.Name, field.Schema) + if !otherColFieldDescription.HasValue() || otherColFieldDescription.Value().Kind.Value().IsArray() { + // Relations only defined on one side of the object are possible, and so if this is one of them + // or if the other side is an array, we need to add the field to the schema (this is the primary side).
+ definition.Schema.Fields = append( + definition.Schema.Fields, + client.SchemaFieldDescription{ + Name: field.Name, + Kind: field.Kind.Value(), + Typ: cTypeByFieldNameByObjName[definition.Schema.Name][field.Name], + }, + ) } - if !shouldBeOneSidedRelation && !rel.finalized { - return client.NewErrRelationOneSided(field.Name, field.Schema) + otherIsEmbedded := len(otherColDefinition.Value().Description.Fields) == 0 + if !otherIsEmbedded { + var schemaFieldIndex int + var schemaFieldExists bool + for i, schemaField := range definition.Schema.Fields { + if schemaField.Name == field.Name { + schemaFieldIndex = i + schemaFieldExists = true + break + } + } + + if schemaFieldExists { + idFieldName := fmt.Sprintf("%s_id", field.Name) + + if _, idFieldExists := definition.Schema.GetFieldByName(idFieldName); !idFieldExists { + existingFields := definition.Schema.Fields + definition.Schema.Fields = make([]client.SchemaFieldDescription, len(definition.Schema.Fields)+1) + copy(definition.Schema.Fields, existingFields[:schemaFieldIndex+1]) + copy(definition.Schema.Fields[schemaFieldIndex+2:], existingFields[schemaFieldIndex+1:]) + + // An _id field is added for every 1-1 or 1-N relationship from this object if the relation + // does not point to an embedded object. + // + // It is inserted immediately after the object field to make things nicer for the user. + definition.Schema.Fields[schemaFieldIndex+1] = client.SchemaFieldDescription{ + Name: idFieldName, + Kind: client.FieldKind_DocID, + Typ: defaultCRDTForFieldKind[client.FieldKind_DocID], + } + } + } } - field.IsPrimaryRelation = fieldRelationType.isSet(relation_Type_Primary) - definition.Schema.Fields[i] = field + definitions[i] = definition } } diff --git a/request/graphql/schema/descriptions.go b/request/graphql/schema/descriptions.go index cb19140d26..dc97705b5d 100644 --- a/request/graphql/schema/descriptions.go +++ b/request/graphql/schema/descriptions.go @@ -18,28 +18,6 @@ import ( ) var ( - // this is only here as a reference, and not to be used - // directly. 
As it will yield incorrect and unexpected - // results - - //nolint:unused - gqlTypeToFieldKindReference = map[gql.Type]client.FieldKind{ - gql.ID: client.FieldKind_DocID, - gql.Boolean: client.FieldKind_NILLABLE_BOOL, - gql.Int: client.FieldKind_NILLABLE_INT, - gql.Float: client.FieldKind_NILLABLE_FLOAT, - gql.DateTime: client.FieldKind_NILLABLE_DATETIME, - gql.String: client.FieldKind_NILLABLE_STRING, - &gql.Object{}: client.FieldKind_FOREIGN_OBJECT, - &gql.List{}: client.FieldKind_FOREIGN_OBJECT_ARRAY, - // Custom scalars - schemaTypes.BlobScalarType: client.FieldKind_NILLABLE_BLOB, - schemaTypes.JSONScalarType: client.FieldKind_NILLABLE_JSON, - // More custom ones to come - // - JSON - // - Counters - } - fieldKindToGQLType = map[client.FieldKind]gql.Type{ client.FieldKind_DocID: gql.ID, client.FieldKind_NILLABLE_BOOL: gql.Boolean, @@ -59,7 +37,6 @@ var ( client.FieldKind_NILLABLE_JSON: schemaTypes.JSONScalarType, } - // This map is fine to use defaultCRDTForFieldKind = map[client.FieldKind]client.CType{ client.FieldKind_DocID: client.LWW_REGISTER, client.FieldKind_NILLABLE_BOOL: client.LWW_REGISTER, @@ -77,8 +54,6 @@ var ( client.FieldKind_NILLABLE_STRING_ARRAY: client.LWW_REGISTER, client.FieldKind_NILLABLE_BLOB: client.LWW_REGISTER, client.FieldKind_NILLABLE_JSON: client.LWW_REGISTER, - client.FieldKind_FOREIGN_OBJECT: client.LWW_REGISTER, - client.FieldKind_FOREIGN_OBJECT_ARRAY: client.NONE_CRDT, } ) diff --git a/request/graphql/schema/descriptions_test.go b/request/graphql/schema/descriptions_test.go index 93f6b36d48..320bef158a 100644 --- a/request/graphql/schema/descriptions_test.go +++ b/request/graphql/schema/descriptions_test.go @@ -36,6 +36,20 @@ func TestSingleSimpleType(t *testing.T) { Description: client.CollectionDescription{ Name: immutable.Some("User"), Indexes: []client.IndexDescription{}, + Fields: []client.CollectionFieldDescription{ + { + Name: "_docID", + }, + { + Name: "age", + }, + { + Name: "name", + }, + { + Name: "verified", + }, + }, }, Schema: client.SchemaDescription{ Name: "User", @@ -85,6 +99,20 @@ func TestSingleSimpleType(t *testing.T) { Description: client.CollectionDescription{ Name: immutable.Some("User"), Indexes: []client.IndexDescription{}, + Fields: []client.CollectionFieldDescription{ + { + Name: "_docID", + }, + { + Name: "age", + }, + { + Name: "name", + }, + { + Name: "verified", + }, + }, }, Schema: client.SchemaDescription{ Name: "User", @@ -116,6 +144,20 @@ func TestSingleSimpleType(t *testing.T) { Description: client.CollectionDescription{ Name: immutable.Some("Author"), Indexes: []client.IndexDescription{}, + Fields: []client.CollectionFieldDescription{ + { + Name: "_docID", + }, + { + Name: "name", + }, + { + Name: "publisher", + }, + { + Name: "rating", + }, + }, }, Schema: client.SchemaDescription{ Name: "Author", @@ -157,7 +199,7 @@ func TestSingleSimpleType(t *testing.T) { type Author { name: String age: Int - published: Book + published: Book @primary } `, targetDescs: []client.CollectionDefinition{ @@ -165,133 +207,75 @@ func TestSingleSimpleType(t *testing.T) { Description: client.CollectionDescription{ Name: immutable.Some("Book"), Indexes: []client.IndexDescription{}, - }, - Schema: client.SchemaDescription{ - Name: "Book", - Fields: []client.SchemaFieldDescription{ + Fields: []client.CollectionFieldDescription{ { Name: "_docID", - Kind: client.FieldKind_DocID, - Typ: client.NONE_CRDT, }, { Name: "author", - RelationName: "author_book", - Kind: client.FieldKind_FOREIGN_OBJECT, - Typ: client.NONE_CRDT, - Schema: "Author", + 
Kind: immutable.Some[client.FieldKind](client.ObjectKind("Author")), + RelationName: immutable.Some("author_book"), }, { - Name: "author_id", - Kind: client.FieldKind_DocID, - Typ: client.LWW_REGISTER, + Name: "author_id", + Kind: immutable.Some[client.FieldKind](client.FieldKind_DocID), + RelationName: immutable.Some("author_book"), }, { Name: "name", - Kind: client.FieldKind_NILLABLE_STRING, - Typ: client.LWW_REGISTER, }, { Name: "rating", - Kind: client.FieldKind_NILLABLE_FLOAT, - Typ: client.LWW_REGISTER, }, }, }, - }, - { - Description: client.CollectionDescription{ - Name: immutable.Some("Author"), - Indexes: []client.IndexDescription{}, - }, Schema: client.SchemaDescription{ - Name: "Author", + Name: "Book", Fields: []client.SchemaFieldDescription{ { Name: "_docID", Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, - { - Name: "age", - Kind: client.FieldKind_NILLABLE_INT, - Typ: client.LWW_REGISTER, - }, { Name: "name", Kind: client.FieldKind_NILLABLE_STRING, Typ: client.LWW_REGISTER, }, { - Name: "published", - RelationName: "author_book", - Kind: client.FieldKind_FOREIGN_OBJECT, - Typ: client.NONE_CRDT, - Schema: "Book", - IsPrimaryRelation: true, - }, - { - Name: "published_id", - Kind: client.FieldKind_DocID, + Name: "rating", + Kind: client.FieldKind_NILLABLE_FLOAT, Typ: client.LWW_REGISTER, }, }, }, }, - }, - }, - { - description: "Multiple simple types", - sdl: ` - type User { - name: String - age: Int - verified: Boolean - } - - type Author { - name: String - publisher: String - rating: Float - } - `, - targetDescs: []client.CollectionDefinition{ { Description: client.CollectionDescription{ - Name: immutable.Some("User"), + Name: immutable.Some("Author"), Indexes: []client.IndexDescription{}, - }, - Schema: client.SchemaDescription{ - Name: "User", - Fields: []client.SchemaFieldDescription{ + Fields: []client.CollectionFieldDescription{ { Name: "_docID", - Kind: client.FieldKind_DocID, - Typ: client.NONE_CRDT, }, { Name: "age", - Kind: client.FieldKind_NILLABLE_INT, - Typ: client.LWW_REGISTER, }, { Name: "name", - Kind: client.FieldKind_NILLABLE_STRING, - Typ: client.LWW_REGISTER, }, { - Name: "verified", - Kind: client.FieldKind_NILLABLE_BOOL, - Typ: client.LWW_REGISTER, + Name: "published", + Kind: immutable.Some[client.FieldKind](client.ObjectKind("Book")), + RelationName: immutable.Some("author_book"), + }, + { + Name: "published_id", + Kind: immutable.Some[client.FieldKind](client.FieldKind_DocID), + RelationName: immutable.Some("author_book"), }, }, }, - }, - { - Description: client.CollectionDescription{ - Name: immutable.Some("Author"), - Indexes: []client.IndexDescription{}, - }, Schema: client.SchemaDescription{ Name: "Author", Fields: []client.SchemaFieldDescription{ @@ -300,19 +284,24 @@ func TestSingleSimpleType(t *testing.T) { Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, + { + Name: "age", + Kind: client.FieldKind_NILLABLE_INT, + Typ: client.LWW_REGISTER, + }, { Name: "name", Kind: client.FieldKind_NILLABLE_STRING, Typ: client.LWW_REGISTER, }, { - Name: "publisher", - Kind: client.FieldKind_NILLABLE_STRING, + Name: "published", + Kind: client.ObjectKind("Book"), Typ: client.LWW_REGISTER, }, { - Name: "rating", - Kind: client.FieldKind_NILLABLE_FLOAT, + Name: "published_id", + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, }, }, @@ -332,7 +321,7 @@ func TestSingleSimpleType(t *testing.T) { type Author { name: String age: Int - published: Book @relation(name:"book_authors") + published: Book @relation(name:"book_authors") @primary } `, 
targetDescs: []client.CollectionDefinition{ @@ -340,26 +329,35 @@ func TestSingleSimpleType(t *testing.T) { Description: client.CollectionDescription{ Name: immutable.Some("Book"), Indexes: []client.IndexDescription{}, - }, - Schema: client.SchemaDescription{ - Name: "Book", - Fields: []client.SchemaFieldDescription{ + Fields: []client.CollectionFieldDescription{ { Name: "_docID", - Kind: client.FieldKind_DocID, - Typ: client.NONE_CRDT, }, { Name: "author", - RelationName: "book_authors", - Kind: client.FieldKind_FOREIGN_OBJECT, - Typ: client.NONE_CRDT, - Schema: "Author", + Kind: immutable.Some[client.FieldKind](client.ObjectKind("Author")), + RelationName: immutable.Some("book_authors"), }, { - Name: "author_id", + Name: "author_id", + Kind: immutable.Some[client.FieldKind](client.FieldKind_DocID), + RelationName: immutable.Some("book_authors"), + }, + { + Name: "name", + }, + { + Name: "rating", + }, + }, + }, + Schema: client.SchemaDescription{ + Name: "Book", + Fields: []client.SchemaFieldDescription{ + { + Name: "_docID", Kind: client.FieldKind_DocID, - Typ: client.LWW_REGISTER, + Typ: client.NONE_CRDT, }, { Name: "name", @@ -378,6 +376,27 @@ func TestSingleSimpleType(t *testing.T) { Description: client.CollectionDescription{ Name: immutable.Some("Author"), Indexes: []client.IndexDescription{}, + Fields: []client.CollectionFieldDescription{ + { + Name: "_docID", + }, + { + Name: "age", + }, + { + Name: "name", + }, + { + Name: "published", + Kind: immutable.Some[client.FieldKind](client.ObjectKind("Book")), + RelationName: immutable.Some("book_authors"), + }, + { + Name: "published_id", + Kind: immutable.Some[client.FieldKind](client.FieldKind_DocID), + RelationName: immutable.Some("book_authors"), + }, + }, }, Schema: client.SchemaDescription{ Name: "Author", @@ -398,12 +417,9 @@ func TestSingleSimpleType(t *testing.T) { Typ: client.LWW_REGISTER, }, { - Name: "published", - RelationName: "book_authors", - Kind: client.FieldKind_FOREIGN_OBJECT, - Typ: client.NONE_CRDT, - Schema: "Book", - IsPrimaryRelation: true, + Name: "published", + Kind: client.ObjectKind("Book"), + Typ: client.LWW_REGISTER, }, { Name: "published_id", @@ -435,6 +451,27 @@ func TestSingleSimpleType(t *testing.T) { Description: client.CollectionDescription{ Name: immutable.Some("Book"), Indexes: []client.IndexDescription{}, + Fields: []client.CollectionFieldDescription{ + { + Name: "_docID", + }, + { + Name: "author", + Kind: immutable.Some[client.FieldKind](client.ObjectKind("Author")), + RelationName: immutable.Some("author_book"), + }, + { + Name: "author_id", + Kind: immutable.Some[client.FieldKind](client.FieldKind_DocID), + RelationName: immutable.Some("author_book"), + }, + { + Name: "name", + }, + { + Name: "rating", + }, + }, }, Schema: client.SchemaDescription{ Name: "Book", @@ -445,12 +482,9 @@ func TestSingleSimpleType(t *testing.T) { Typ: client.NONE_CRDT, }, { - Name: "author", - RelationName: "author_book", - Kind: client.FieldKind_FOREIGN_OBJECT, - Typ: client.NONE_CRDT, - Schema: "Author", - IsPrimaryRelation: true, + Name: "author", + Kind: client.ObjectKind("Author"), + Typ: client.LWW_REGISTER, }, { Name: "author_id", @@ -474,6 +508,27 @@ func TestSingleSimpleType(t *testing.T) { Description: client.CollectionDescription{ Name: immutable.Some("Author"), Indexes: []client.IndexDescription{}, + Fields: []client.CollectionFieldDescription{ + { + Name: "_docID", + }, + { + Name: "age", + }, + { + Name: "name", + }, + { + Name: "published", + Kind: 
immutable.Some[client.FieldKind](client.ObjectKind("Book")), + RelationName: immutable.Some("author_book"), + }, + { + Name: "published_id", + Kind: immutable.Some[client.FieldKind](client.FieldKind_DocID), + RelationName: immutable.Some("author_book"), + }, + }, }, Schema: client.SchemaDescription{ Name: "Author", @@ -493,18 +548,6 @@ func TestSingleSimpleType(t *testing.T) { Kind: client.FieldKind_NILLABLE_STRING, Typ: client.LWW_REGISTER, }, - { - Name: "published", - RelationName: "author_book", - Kind: client.FieldKind_FOREIGN_OBJECT, - Typ: client.NONE_CRDT, - Schema: "Book", - }, - { - Name: "published_id", - Kind: client.FieldKind_DocID, - Typ: client.LWW_REGISTER, - }, }, }, }, @@ -530,6 +573,27 @@ func TestSingleSimpleType(t *testing.T) { Description: client.CollectionDescription{ Name: immutable.Some("Book"), Indexes: []client.IndexDescription{}, + Fields: []client.CollectionFieldDescription{ + { + Name: "_docID", + }, + { + Name: "author", + Kind: immutable.Some[client.FieldKind](client.ObjectKind("Author")), + RelationName: immutable.Some("author_book"), + }, + { + Name: "author_id", + Kind: immutable.Some[client.FieldKind](client.FieldKind_DocID), + RelationName: immutable.Some("author_book"), + }, + { + Name: "name", + }, + { + Name: "rating", + }, + }, }, Schema: client.SchemaDescription{ Name: "Book", @@ -540,26 +604,23 @@ func TestSingleSimpleType(t *testing.T) { Typ: client.NONE_CRDT, }, { - Name: "author", - RelationName: "author_book", - Kind: client.FieldKind_FOREIGN_OBJECT, - Typ: client.NONE_CRDT, - Schema: "Author", - IsPrimaryRelation: true, + Name: "name", + Kind: client.FieldKind_NILLABLE_STRING, + Typ: client.LWW_REGISTER, }, { - Name: "author_id", - Kind: client.FieldKind_DocID, + Name: "rating", + Kind: client.FieldKind_NILLABLE_FLOAT, Typ: client.LWW_REGISTER, }, { - Name: "name", - Kind: client.FieldKind_NILLABLE_STRING, + Name: "author", + Kind: client.ObjectKind("Author"), Typ: client.LWW_REGISTER, }, { - Name: "rating", - Kind: client.FieldKind_NILLABLE_FLOAT, + Name: "author_id", + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, }, }, @@ -569,6 +630,22 @@ func TestSingleSimpleType(t *testing.T) { Description: client.CollectionDescription{ Name: immutable.Some("Author"), Indexes: []client.IndexDescription{}, + Fields: []client.CollectionFieldDescription{ + { + Name: "_docID", + }, + { + Name: "age", + }, + { + Name: "name", + }, + { + Name: "published", + Kind: immutable.Some[client.FieldKind](client.ObjectArrayKind("Book")), + RelationName: immutable.Some("author_book"), + }, + }, }, Schema: client.SchemaDescription{ Name: "Author", @@ -588,13 +665,6 @@ func TestSingleSimpleType(t *testing.T) { Kind: client.FieldKind_NILLABLE_STRING, Typ: client.LWW_REGISTER, }, - { - Name: "published", - RelationName: "author_book", - Kind: client.FieldKind_FOREIGN_OBJECT_ARRAY, - Typ: client.NONE_CRDT, - Schema: "Book", - }, }, }, }, @@ -616,6 +686,7 @@ func runCreateDescriptionTest(t *testing.T, testcase descriptionTestCase) { for i, d := range descs { assert.Equal(t, testcase.targetDescs[i].Description, d.Description, testcase.description) + assert.Equal(t, testcase.targetDescs[i].Schema, d.Schema, testcase.description) } } diff --git a/request/graphql/schema/errors.go b/request/graphql/schema/errors.go index e832e687ee..304df792e6 100644 --- a/request/graphql/schema/errors.go +++ b/request/graphql/schema/errors.go @@ -27,7 +27,9 @@ const ( errIndexUnknownArgument string = "index with unknown argument" errIndexInvalidArgument string = "index with invalid 
argument" errIndexInvalidName string = "index with invalid name" - errViewRelationMustBeOneSided string = "relations in views must only be defined on one schema" + errPolicyUnknownArgument string = "policy with unknown argument" + errPolicyInvalidIDProp string = "policy directive with invalid id property" + errPolicyInvalidResourceProp string = "policy directive with invalid resource property" ) var ( @@ -47,11 +49,13 @@ var ( ErrMultipleRelationPrimaries = errors.New("relation can only have a single field set as primary") // NonNull is the literal name of the GQL type, so we have to disable the linter //nolint:revive - ErrNonNullNotSupported = errors.New("NonNull fields are not currently supported") - ErrIndexMissingFields = errors.New(errIndexMissingFields) - ErrIndexWithUnknownArg = errors.New(errIndexUnknownArgument) - ErrIndexWithInvalidArg = errors.New(errIndexInvalidArgument) - ErrViewRelationMustBeOneSided = errors.New(errViewRelationMustBeOneSided) + ErrNonNullNotSupported = errors.New("NonNull fields are not currently supported") + ErrIndexMissingFields = errors.New(errIndexMissingFields) + ErrIndexWithUnknownArg = errors.New(errIndexUnknownArgument) + ErrIndexWithInvalidArg = errors.New(errIndexInvalidArgument) + ErrPolicyWithUnknownArg = errors.New(errPolicyUnknownArgument) + ErrPolicyInvalidIDProp = errors.New(errPolicyInvalidIDProp) + ErrPolicyInvalidResourceProp = errors.New(errPolicyInvalidResourceProp) ) func NewErrDuplicateField(objectName, fieldName string) error { @@ -132,11 +136,3 @@ func NewErrRelationNotFound(relationName string) error { errors.NewKV("RelationName", relationName), ) } - -func NewErrViewRelationMustBeOneSided(fieldName string, typeName string) error { - return errors.New( - errViewRelationMustBeOneSided, - errors.NewKV("Field", fieldName), - errors.NewKV("Type", typeName), - ) -} diff --git a/request/graphql/schema/generate.go b/request/graphql/schema/generate.go index e4397e2e40..6b7483be4f 100644 --- a/request/graphql/schema/generate.go +++ b/request/graphql/schema/generate.go @@ -414,7 +414,7 @@ func (g *Generator) buildTypes( // will be reassigned before the thunk is run // TODO remove when Go 1.22 collection := c - fieldDescriptions := collection.Schema.Fields + fieldDescriptions := collection.GetFields() isEmbeddedObject := !collection.Description.Name.HasValue() isQuerySource := len(collection.Description.QuerySources()) > 0 isViewObject := isEmbeddedObject || isQuerySource @@ -460,16 +460,16 @@ func (g *Generator) buildTypes( } var ttype gql.Type - if field.Kind == client.FieldKind_FOREIGN_OBJECT { + if field.Kind.IsObject() && !field.Kind.IsArray() { var ok bool - ttype, ok = g.manager.schema.TypeMap()[field.Schema] + ttype, ok = g.manager.schema.TypeMap()[field.Kind.Underlying()] if !ok { - return nil, NewErrTypeNotFound(field.Schema) + return nil, NewErrTypeNotFound(field.Kind.Underlying()) } - } else if field.Kind == client.FieldKind_FOREIGN_OBJECT_ARRAY { - t, ok := g.manager.schema.TypeMap()[field.Schema] + } else if field.Kind.IsObjectArray() { + t, ok := g.manager.schema.TypeMap()[field.Kind.Underlying()] if !ok { - return nil, NewErrTypeNotFound(field.Schema) + return nil, NewErrTypeNotFound(field.Kind.Underlying()) } ttype = gql.NewList(t) } else { @@ -540,7 +540,6 @@ func (g *Generator) buildMutationInputTypes(collections []client.CollectionDefin // will be reassigned before the thunk is run // TODO remove when Go 1.22 collection := c - fieldDescriptions := collection.Schema.Fields mutationInputName := 
collection.Description.Name.Value() + "MutationInputArg" // check if mutation input type exists @@ -558,7 +557,7 @@ func (g *Generator) buildMutationInputTypes(collections []client.CollectionDefin mutationObjConf.Fields = (gql.InputObjectConfigFieldMapThunk)(func() (gql.InputObjectConfigFieldMap, error) { fields := make(gql.InputObjectConfigFieldMap) - for _, field := range fieldDescriptions { + for _, field := range collection.GetFields() { if strings.HasPrefix(field.Name, "_") { // ignore system defined args as the // user cannot override their values @@ -566,9 +565,9 @@ func (g *Generator) buildMutationInputTypes(collections []client.CollectionDefin } var ttype gql.Type - if field.Kind == client.FieldKind_FOREIGN_OBJECT { + if field.Kind.IsObject() && !field.Kind.IsArray() { ttype = gql.ID - } else if field.Kind == client.FieldKind_FOREIGN_OBJECT_ARRAY { + } else if field.Kind.IsObjectArray() { ttype = gql.NewList(gql.ID) } else { var ok bool diff --git a/request/graphql/schema/manager.go b/request/graphql/schema/manager.go index 89860d2c53..f4a2cb3e5b 100644 --- a/request/graphql/schema/manager.go +++ b/request/graphql/schema/manager.go @@ -113,6 +113,7 @@ func defaultDirectivesType() []*gql.Directive { return []*gql.Directive{ schemaTypes.CRDTFieldDirective, schemaTypes.ExplainDirective, + schemaTypes.PolicyDirective, schemaTypes.IndexDirective, schemaTypes.IndexFieldDirective, schemaTypes.PrimaryDirective, diff --git a/request/graphql/schema/relations.go b/request/graphql/schema/relations.go deleted file mode 100644 index e6d2af8b09..0000000000 --- a/request/graphql/schema/relations.go +++ /dev/null @@ -1,171 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package schema - -import ( - "fmt" - "strings" - - "github.com/sourcenetwork/defradb/client" -) - -// relationType describes the type of relation between two types. -type relationType uint8 - -const ( - relation_Type_ONE relationType = 1 // 0b0000 0001 - relation_Type_MANY relationType = 2 // 0b0000 0010 - relation_Type_Primary relationType = 128 // 0b1000 0000 Primary reference entity on relation -) - -// IsSet returns true if the target relation type is set. -func (m relationType) isSet(target relationType) bool { - return m&target > 0 -} - -// RelationManager keeps track of all the relations that exist -// between schema types -type RelationManager struct { - relations map[string]*Relation -} - -func NewRelationManager() *RelationManager { - return &RelationManager{ - relations: make(map[string]*Relation), - } -} - -func (rm *RelationManager) GetRelation(name string) (*Relation, error) { - rel, ok := rm.relations[name] - if !ok { - return nil, NewErrRelationNotFound(name) - } - return rel, nil -} - -// RegisterSingle is used if you only know a single side of the relation -// at a time. It allows you to iteratively, across two calls, build the relation. -// If the relation exists and is finalized, then nothing is done. Returns true -// if nothing is done or the relation is successfully registered. 
-func (rm *RelationManager) RegisterSingle( - name string, - schemaType string, - schemaField string, - relType relationType, -) (bool, error) { - if name == "" { - return false, client.NewErrUninitializeProperty("RegisterSingle", "name") - } - - rel, ok := rm.relations[name] - if !ok { - // If a relation doesn't exist then make one. - rm.relations[name] = &Relation{ - name: name, - types: []relationType{relType}, - schemaTypes: []string{schemaType}, - fields: []string{schemaField}, - } - return true, nil - } - - if !rel.finalized { - // If a relation exists, and is not finalized, then finalizing it. - rel.types = append(rel.types, relType) - rel.schemaTypes = append(rel.schemaTypes, schemaType) - rel.fields = append(rel.fields, schemaField) - - if err := rel.finalize(); err != nil { - return false, err - } - rm.relations[name] = rel - } - - return true, nil -} - -type Relation struct { - name string - types []relationType - schemaTypes []string - fields []string - - // finalized indicates if we've properly - // updated both sides of the relation - finalized bool -} - -func (r *Relation) finalize() error { - // make sure all the types/fields are set - if len(r.types) != 2 || len(r.schemaTypes) != 2 || len(r.fields) != 2 { - return ErrRelationMissingTypes - } - - if isOne(r.types[0]) && isMany(r.types[1]) { - r.types[0] |= relation_Type_Primary // set primary on one - r.types[1] &^= relation_Type_Primary // clear primary on many - } else if isOne(r.types[1]) && isMany(r.types[0]) { - r.types[1] |= relation_Type_Primary // set primary on one - r.types[0] &^= relation_Type_Primary // clear primary on many - } else if isOne(r.types[1]) && isOne(r.types[0]) { - t1, t2 := r.types[0], r.types[1] - aBit := t1 & t2 - xBit := t1 ^ t2 - - // both types have primary set - if aBit.isSet(relation_Type_Primary) { - return ErrMultipleRelationPrimaries - } else if !xBit.isSet(relation_Type_Primary) { - // neither type has primary set, auto add to - // lexicographically first one by schema type name - if strings.Compare(r.schemaTypes[0], r.schemaTypes[1]) < 1 { - r.types[1] = r.types[1] | relation_Type_Primary - } else { - r.types[0] = r.types[0] | relation_Type_Primary - } - } - } - - r.finalized = true - return nil -} - -func (r Relation) getField(schemaType string, field string) (string, relationType, bool) { - for i, f := range r.fields { - if f == field && r.schemaTypes[i] == schemaType { - return f, r.types[i], true - } - } - return "", relationType(0), false -} - -func genRelationName(t1, t2 string) (string, error) { - if t1 == "" || t2 == "" { - return "", client.NewErrUninitializeProperty("genRelationName", "relation types") - } - t1 = strings.ToLower(t1) - t2 = strings.ToLower(t2) - - if i := strings.Compare(t1, t2); i < 0 { - return fmt.Sprintf("%s_%s", t1, t2), nil - } - return fmt.Sprintf("%s_%s", t2, t1), nil -} - -// isOne returns true if the Relation_ONE bit is set -func isOne(fieldmeta relationType) bool { - return fieldmeta.isSet(relation_Type_ONE) -} - -// isMany returns true if the Relation_ONE bit is set -func isMany(fieldmeta relationType) bool { - return fieldmeta.isSet(relation_Type_MANY) -} diff --git a/request/graphql/schema/types/types.go b/request/graphql/schema/types/types.go index 37cb840d05..7865e204db 100644 --- a/request/graphql/schema/types/types.go +++ b/request/graphql/schema/types/types.go @@ -29,6 +29,10 @@ const ( CRDTDirectiveLabel = "crdt" CRDTDirectivePropType = "type" + PolicySchemaDirectiveLabel = "policy" + PolicySchemaDirectivePropID = "id" + 
PolicySchemaDirectivePropResource = "resource" + IndexDirectiveLabel = "index" IndexDirectivePropName = "name" IndexDirectivePropUnique = "unique" @@ -94,6 +98,22 @@ var ( }, }) + PolicyDirective *gql.Directive = gql.NewDirective(gql.DirectiveConfig{ + Name: PolicySchemaDirectiveLabel, + Description: "@policy is a directive that can be used to link a policy to a collection type.", + Args: gql.FieldConfigArgument{ + PolicySchemaDirectivePropID: &gql.ArgumentConfig{ + Type: gql.String, + }, + PolicySchemaDirectivePropResource: &gql.ArgumentConfig{ + Type: gql.String, + }, + }, + Locations: []string{ + gql.DirectiveLocationObject, + }, + }) + IndexDirective *gql.Directive = gql.NewDirective(gql.DirectiveConfig{ Name: IndexDirectiveLabel, Description: "@index is a directive that can be used to create an index on a type.", @@ -141,8 +161,20 @@ var ( Description: "Last Write Wins register", }, client.PN_COUNTER.String(): &gql.EnumValueConfig{ - Value: client.PN_COUNTER, - Description: "Positive-Negative Counter", + Value: client.PN_COUNTER, + Description: `Positive-Negative Counter. + +WARNING: Incrementing an integer and causing it to overflow the int64 max value +will cause the value to roll over to the int64 min value. Incrementing a float and +causing it to overflow the float64 max value will act like a no-op.`, + }, + client.P_COUNTER.String(): &gql.EnumValueConfig{ + Value: client.P_COUNTER, + Description: `Positive Counter. + +WARNING: Incrementing an integer and causing it to overflow the int64 max value +will cause the value to roll over to the int64 min value. Incrementing a float and +causing it to overflow the float64 max value will act like a no-op.`, + }, }, }) diff --git a/tests/bench/bench_util.go b/tests/bench/bench_util.go index d7c00bd664..186dbc0f3e 100644 --- a/tests/bench/bench_util.go +++ b/tests/bench/bench_util.go @@ -20,10 +20,10 @@ import ( ds "github.com/ipfs/go-datastore" "github.com/sourcenetwork/badger/v4" + "github.com/sourcenetwork/corelog" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/logging" "github.com/sourcenetwork/defradb/tests/bench/fixtures" testutils "github.com/sourcenetwork/defradb/tests/integration" ) @@ -35,12 +35,10 @@ const ( var ( storage string = "memory" - log = logging.MustNewLogger("tests.bench") + log = corelog.NewLogger("tests.bench") ) func init() { - logging.SetConfig(logging.Config{Level: logging.NewLogLevelOption(logging.Error)}) - // assign if not empty if s := os.Getenv(storageEnvName); s != "" { storage = s @@ -161,7 +159,7 @@ func BackfillBenchmarkDB( // create the documents docIDs := make([]client.DocID, numTypes) for j := 0; j < numTypes; j++ { - doc, err := client.NewDocFromJSON([]byte(docs[j]), cols[j].Schema()) + doc, err := client.NewDocFromJSON([]byte(docs[j]), cols[j].Definition()) if err != nil { errCh <- errors.Wrap("failed to create document from fixture", err) return } @@ -174,10 +172,10 @@ for { if err := cols[j].Create(ctx, doc); err != nil && err.Error() == badger.ErrConflict.Error() { - log.Info( + log.InfoContext( ctx, "Failed to commit TX for doc %s, retrying...\n", - logging.NewKV("DocID", doc.ID()), + corelog.Any("DocID", doc.ID()), ) continue } else if err != nil { diff --git a/tests/bench/collection/utils.go b/tests/bench/collection/utils.go index a1bed37d3a..24c90b28b5 100644 --- a/tests/bench/collection/utils.go +++ b/tests/bench/collection/utils.go @@ -72,7 +72,11 @@ func runCollectionBenchGetSync(b *testing.B, for i 
:= 0; i < b.N; i++ { // outer benchmark loop for j := 0; j < opCount/numTypes; j++ { // number of Get operations we want to execute for k := 0; k < numTypes; k++ { // apply op to all the related types - collections[k].Get(ctx, listOfDocIDs[j][k], false) //nolint:errcheck + collections[k].Get( //nolint:errcheck + ctx, + listOfDocIDs[j][k], + false, + ) } } } @@ -98,7 +102,11 @@ func runCollectionBenchGetAsync(b *testing.B, for k := 0; k < numTypes; k++ { // apply op to all the related types wg.Add(1) go func(ctx context.Context, col client.Collection, docID client.DocID) { - col.Get(ctx, docID, false) //nolint:errcheck + col.Get( //nolint:errcheck + ctx, + docID, + false, + ) wg.Done() }(ctx, collections[k], listOfDocIDs[j][k]) } @@ -170,7 +178,7 @@ func runCollectionBenchCreateMany( docs := make([]*client.Document, opCount) for j := 0; j < opCount; j++ { d, _ := fixture.GenerateDocs() - docs[j], _ = client.NewDocFromJSON([]byte(d[0]), collections[0].Schema()) + docs[j], _ = client.NewDocFromJSON([]byte(d[0]), collections[0].Definition()) } collections[0].CreateMany(ctx, docs) //nolint:errcheck @@ -193,7 +201,7 @@ func runCollectionBenchCreateSync(b *testing.B, for j := 0; j < runs; j++ { docs, _ := fixture.GenerateDocs() for k := 0; k < numTypes; k++ { - doc, _ := client.NewDocFromJSON([]byte(docs[k]), collections[k].Schema()) + doc, _ := client.NewDocFromJSON([]byte(docs[k]), collections[k].Definition()) collections[k].Create(ctx, doc) //nolint:errcheck } } @@ -232,7 +240,7 @@ func runCollectionBenchCreateAsync(b *testing.B, docs, _ := fixture.GenerateDocs() // create the documents for j := 0; j < numTypes; j++ { - doc, _ := client.NewDocFromJSON([]byte(docs[j]), collections[j].Schema()) + doc, _ := client.NewDocFromJSON([]byte(docs[j]), collections[j].Definition()) collections[j].Create(ctx, doc) //nolint:errcheck } diff --git a/tests/bench/query/planner/utils.go b/tests/bench/query/planner/utils.go index fdd504175a..dec517d781 100644 --- a/tests/bench/query/planner/utils.go +++ b/tests/bench/query/planner/utils.go @@ -15,6 +15,8 @@ import ( "fmt" "testing" + "github.com/sourcenetwork/defradb/acp" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/errors" @@ -55,11 +57,11 @@ func runMakePlanBench( fixture fixtures.Generator, query string, ) error { - db, _, err := benchutils.SetupDBAndCollections(b, ctx, fixture) + d, _, err := benchutils.SetupDBAndCollections(b, ctx, fixture) if err != nil { return err } - defer db.Close() + defer d.Close() parser, err := buildParser(ctx, fixture) if err != nil { @@ -71,14 +73,20 @@ func runMakePlanBench( if len(errs) > 0 { return errors.Wrap("failed to parse query string", errors.New(fmt.Sprintf("%v", errs))) } - txn, err := db.NewTxn(ctx, false) + txn, err := d.NewTxn(ctx, false) if err != nil { return errors.Wrap("failed to create txn", err) } - b.ResetTimer() + for i := 0; i < b.N; i++ { - planner := planner.New(ctx, db.WithTxn(txn), txn) + planner := planner.New( + ctx, + acpIdentity.None, + acp.NoACP, + d, + txn, + ) plan, err := planner.MakePlan(q) if err != nil { return errors.Wrap("failed to make plan", err) diff --git a/tests/clients/cli/wrapper.go b/tests/clients/cli/wrapper.go index 89ba2cf3db..b0dddff9cd 100644 --- a/tests/clients/cli/wrapper.go +++ b/tests/clients/cli/wrapper.go @@ -23,6 +23,7 @@ import ( blockstore "github.com/ipfs/boxo/blockstore" "github.com/lens-vm/lens/host-go/config/model" 
"github.com/libp2p/go-libp2p/core/peer" + "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/cli" @@ -171,6 +172,26 @@ func (w *Wrapper) BasicExport(ctx context.Context, config *client.BackupConfig) return err } +func (w *Wrapper) AddPolicy( + ctx context.Context, + policy string, +) (client.AddPolicyResult, error) { + args := []string{"client", "acp", "policy", "add"} + args = append(args, policy) + + data, err := w.cmd.execute(ctx, args) + if err != nil { + return client.AddPolicyResult{}, err + } + + var addPolicyResult client.AddPolicyResult + if err := json.Unmarshal(data, &addPolicyResult); err != nil { + return client.AddPolicyResult{}, err + } + + return addPolicyResult, err +} + func (w *Wrapper) AddSchema(ctx context.Context, schema string) ([]client.CollectionDescription, error) { args := []string{"client", "schema", "add"} args = append(args, schema) @@ -210,6 +231,16 @@ func (w *Wrapper) PatchSchema( return err } +func (w *Wrapper) PatchCollection( + ctx context.Context, + patch string, +) error { + args := []string{"client", "collection", "patch"} + args = append(args, patch) + _, err := w.cmd.execute(ctx, args) + return err +} + func (w *Wrapper) SetActiveSchemaVersion(ctx context.Context, schemaVersionID string) error { args := []string{"client", "schema", "set-active"} args = append(args, schemaVersionID) @@ -359,13 +390,16 @@ func (w *Wrapper) GetAllIndexes(ctx context.Context) (map[client.CollectionName] return indexes, nil } -func (w *Wrapper) ExecRequest(ctx context.Context, query string) *client.RequestResult { +func (w *Wrapper) ExecRequest( + ctx context.Context, + query string, +) *client.RequestResult { args := []string{"client", "query"} args = append(args, query) result := &client.RequestResult{} - stdOut, stdErr, err := w.cmd.executeStream(args) + stdOut, stdErr, err := w.cmd.executeStream(ctx, args) if err != nil { result.GQL.Errors = []error{err} return result @@ -474,13 +508,6 @@ func (w *Wrapper) NewConcurrentTxn(ctx context.Context, readOnly bool) (datastor return &Transaction{tx, w.cmd}, nil } -func (w *Wrapper) WithTxn(tx datastore.Txn) client.Store { - return &Wrapper{ - node: w.node, - cmd: w.cmd.withTxn(tx), - } -} - func (w *Wrapper) Root() datastore.RootStore { return w.node.Root() } diff --git a/tests/clients/cli/wrapper_cli.go b/tests/clients/cli/wrapper_cli.go index 2a985dcb18..cee64081d4 100644 --- a/tests/clients/cli/wrapper_cli.go +++ b/tests/clients/cli/wrapper_cli.go @@ -17,12 +17,11 @@ import ( "strings" "github.com/sourcenetwork/defradb/cli" - "github.com/sourcenetwork/defradb/datastore" + "github.com/sourcenetwork/defradb/db" ) type cliWrapper struct { address string - txValue string } func newCliWrapper(address string) *cliWrapper { @@ -31,15 +30,8 @@ func newCliWrapper(address string) *cliWrapper { } } -func (w *cliWrapper) withTxn(tx datastore.Txn) *cliWrapper { - return &cliWrapper{ - address: w.address, - txValue: fmt.Sprintf("%d", tx.ID()), - } -} - -func (w *cliWrapper) execute(_ context.Context, args []string) ([]byte, error) { - stdOut, stdErr, err := w.executeStream(args) +func (w *cliWrapper) execute(ctx context.Context, args []string) ([]byte, error) { + stdOut, stdErr, err := w.executeStream(ctx, args) if err != nil { return nil, err } @@ -57,12 +49,17 @@ func (w *cliWrapper) execute(_ context.Context, args []string) ([]byte, error) { return stdOutData, nil } -func (w *cliWrapper) executeStream(args []string) (io.ReadCloser, io.ReadCloser, error) { +func (w *cliWrapper) executeStream(ctx context.Context, 
args []string) (io.ReadCloser, io.ReadCloser, error) { stdOutRead, stdOutWrite := io.Pipe() stdErrRead, stdErrWrite := io.Pipe() - if w.txValue != "" { - args = append(args, "--tx", w.txValue) + tx, ok := db.TryGetContextTxn(ctx) + if ok { + args = append(args, "--tx", fmt.Sprintf("%d", tx.ID())) + } + id := db.GetContextIdentity(ctx) + if id.HasValue() { + args = append(args, "--identity", id.Value().String()) } args = append(args, "--url", w.address) diff --git a/tests/clients/cli/wrapper_collection.go b/tests/clients/cli/wrapper_collection.go index be7c3302ac..618d9491d2 100644 --- a/tests/clients/cli/wrapper_collection.go +++ b/tests/clients/cli/wrapper_collection.go @@ -19,8 +19,6 @@ import ( "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/client/request" - "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/http" ) @@ -56,7 +54,10 @@ func (c *Collection) Definition() client.CollectionDefinition { return c.def } -func (c *Collection) Create(ctx context.Context, doc *client.Document) error { +func (c *Collection) Create( + ctx context.Context, + doc *client.Document, +) error { if !c.Description().Name.HasValue() { return client.ErrOperationNotPermittedOnNamelessCols } @@ -78,7 +79,10 @@ func (c *Collection) Create(ctx context.Context, doc *client.Document) error { return nil } -func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) error { +func (c *Collection) CreateMany( + ctx context.Context, + docs []*client.Document, +) error { if !c.Description().Name.HasValue() { return client.ErrOperationNotPermittedOnNamelessCols } @@ -110,7 +114,10 @@ func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) er return nil } -func (c *Collection) Update(ctx context.Context, doc *client.Document) error { +func (c *Collection) Update( + ctx context.Context, + doc *client.Document, +) error { if !c.Description().Name.HasValue() { return client.ErrOperationNotPermittedOnNamelessCols } @@ -133,59 +140,44 @@ func (c *Collection) Update(ctx context.Context, doc *client.Document) error { return nil } -func (c *Collection) Save(ctx context.Context, doc *client.Document) error { +func (c *Collection) Save( + ctx context.Context, + doc *client.Document, +) error { _, err := c.Get(ctx, doc.ID(), true) if err == nil { return c.Update(ctx, doc) } - if errors.Is(err, client.ErrDocumentNotFound) { + if errors.Is(err, client.ErrDocumentNotFoundOrNotAuthorized) { return c.Create(ctx, doc) } return err } -func (c *Collection) Delete(ctx context.Context, docID client.DocID) (bool, error) { - res, err := c.DeleteWithDocID(ctx, docID) - if err != nil { - return false, err - } - return res.Count == 1, nil -} +func (c *Collection) Delete( + ctx context.Context, + docID client.DocID, +) (bool, error) { + args := []string{"client", "collection", "delete"} + args = append(args, "--name", c.Description().Name.Value()) + args = append(args, "--docID", docID.String()) -func (c *Collection) Exists(ctx context.Context, docID client.DocID) (bool, error) { - _, err := c.Get(ctx, docID, false) + _, err := c.cmd.execute(ctx, args) if err != nil { return false, err } return true, nil } -func (c *Collection) UpdateWith(ctx context.Context, target any, updater string) (*client.UpdateResult, error) { - switch t := target.(type) { - case string, map[string]any, *request.Filter: - return c.UpdateWithFilter(ctx, t, updater) - case client.DocID: - return 
c.UpdateWithDocID(ctx, t, updater) - case []client.DocID: - return c.UpdateWithDocIDs(ctx, t, updater) - default: - return nil, client.ErrInvalidUpdateTarget - } -} - -func (c *Collection) updateWith( +func (c *Collection) Exists( ctx context.Context, - args []string, -) (*client.UpdateResult, error) { - data, err := c.cmd.execute(ctx, args) + docID client.DocID, +) (bool, error) { + _, err := c.Get(ctx, docID, false) if err != nil { - return nil, err - } - var res client.UpdateResult - if err := json.Unmarshal(data, &res); err != nil { - return nil, err + return false, err } - return &res, nil + return true, nil } func (c *Collection) UpdateWithFilter( @@ -207,77 +199,22 @@ func (c *Collection) UpdateWithFilter( } args = append(args, "--filter", string(filterJSON)) - return c.updateWith(ctx, args) -} - -func (c *Collection) UpdateWithDocID( - ctx context.Context, - docID client.DocID, - updater string, -) (*client.UpdateResult, error) { - if !c.Description().Name.HasValue() { - return nil, client.ErrOperationNotPermittedOnNamelessCols - } - - args := []string{"client", "collection", "update"} - args = append(args, "--name", c.Description().Name.Value()) - args = append(args, "--docID", docID.String()) - args = append(args, "--updater", updater) - - return c.updateWith(ctx, args) -} - -func (c *Collection) UpdateWithDocIDs( - ctx context.Context, - docIDs []client.DocID, - updater string, -) (*client.UpdateResult, error) { - if !c.Description().Name.HasValue() { - return nil, client.ErrOperationNotPermittedOnNamelessCols - } - - args := []string{"client", "collection", "update"} - args = append(args, "--name", c.Description().Name.Value()) - args = append(args, "--updater", updater) - - strDocIDs := make([]string, len(docIDs)) - for i, v := range docIDs { - strDocIDs[i] = v.String() - } - args = append(args, "--docID", strings.Join(strDocIDs, ",")) - - return c.updateWith(ctx, args) -} - -func (c *Collection) DeleteWith(ctx context.Context, target any) (*client.DeleteResult, error) { - switch t := target.(type) { - case string, map[string]any, *request.Filter: - return c.DeleteWithFilter(ctx, t) - case client.DocID: - return c.DeleteWithDocID(ctx, t) - case []client.DocID: - return c.DeleteWithDocIDs(ctx, t) - default: - return nil, client.ErrInvalidDeleteTarget - } -} - -func (c *Collection) deleteWith( - ctx context.Context, - args []string, -) (*client.DeleteResult, error) { data, err := c.cmd.execute(ctx, args) if err != nil { return nil, err } - var res client.DeleteResult + + var res client.UpdateResult if err := json.Unmarshal(data, &res); err != nil { return nil, err } return &res, nil } -func (c *Collection) DeleteWithFilter(ctx context.Context, filter any) (*client.DeleteResult, error) { +func (c *Collection) DeleteWithFilter( + ctx context.Context, + filter any, +) (*client.DeleteResult, error) { if !c.Description().Name.HasValue() { return nil, client.ErrOperationNotPermittedOnNamelessCols } @@ -291,39 +228,23 @@ func (c *Collection) DeleteWithFilter(ctx context.Context, filter any) (*client. 
} args = append(args, "--filter", string(filterJSON)) - return c.deleteWith(ctx, args) -} - -func (c *Collection) DeleteWithDocID(ctx context.Context, docID client.DocID) (*client.DeleteResult, error) { - if !c.Description().Name.HasValue() { - return nil, client.ErrOperationNotPermittedOnNamelessCols - } - - args := []string{"client", "collection", "delete"} - args = append(args, "--name", c.Description().Name.Value()) - args = append(args, "--docID", docID.String()) - - return c.deleteWith(ctx, args) -} - -func (c *Collection) DeleteWithDocIDs(ctx context.Context, docIDs []client.DocID) (*client.DeleteResult, error) { - if !c.Description().Name.HasValue() { - return nil, client.ErrOperationNotPermittedOnNamelessCols + data, err := c.cmd.execute(ctx, args) + if err != nil { + return nil, err } - args := []string{"client", "collection", "delete"} - args = append(args, "--name", c.Description().Name.Value()) - - strDocIDs := make([]string, len(docIDs)) - for i, v := range docIDs { - strDocIDs[i] = v.String() + var res client.DeleteResult + if err := json.Unmarshal(data, &res); err != nil { + return nil, err } - args = append(args, "--docID", strings.Join(strDocIDs, ",")) - - return c.deleteWith(ctx, args) + return &res, nil } -func (c *Collection) Get(ctx context.Context, docID client.DocID, showDeleted bool) (*client.Document, error) { +func (c *Collection) Get( + ctx context.Context, + docID client.DocID, + showDeleted bool, +) (*client.Document, error) { if !c.Description().Name.HasValue() { return nil, client.ErrOperationNotPermittedOnNamelessCols } @@ -340,7 +261,7 @@ func (c *Collection) Get(ctx context.Context, docID client.DocID, showDeleted bo if err != nil { return nil, err } - doc := client.NewDocWithID(docID, c.Schema()) + doc := client.NewDocWithID(docID, c.Definition()) err = doc.SetWithJSON(data) if err != nil { return nil, err @@ -349,14 +270,10 @@ func (c *Collection) Get(ctx context.Context, docID client.DocID, showDeleted bo return doc, nil } -func (c *Collection) WithTxn(tx datastore.Txn) client.Collection { - return &Collection{ - cmd: c.cmd.withTxn(tx), - def: c.def, - } -} +func (c *Collection) GetAllDocIDs( + ctx context.Context, -func (c *Collection) GetAllDocIDs(ctx context.Context) (<-chan client.DocIDResult, error) { +) (<-chan client.DocIDResult, error) { if !c.Description().Name.HasValue() { return nil, client.ErrOperationNotPermittedOnNamelessCols } @@ -364,7 +281,7 @@ func (c *Collection) GetAllDocIDs(ctx context.Context) (<-chan client.DocIDResul args := []string{"client", "collection", "docIDs"} args = append(args, "--name", c.Description().Name.Value()) - stdOut, _, err := c.cmd.executeStream(args) + stdOut, _, err := c.cmd.executeStream(ctx, args) if err != nil { return nil, err } diff --git a/tests/clients/cli/wrapper_lens.go b/tests/clients/cli/wrapper_lens.go index da6011b9eb..a9f3e20bd1 100644 --- a/tests/clients/cli/wrapper_lens.go +++ b/tests/clients/cli/wrapper_lens.go @@ -20,7 +20,6 @@ import ( "github.com/sourcenetwork/immutable/enumerable" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/datastore" ) var _ client.LensRegistry = (*LensRegistry)(nil) @@ -29,10 +28,6 @@ type LensRegistry struct { cmd *cliWrapper } -func (w *LensRegistry) WithTxn(tx datastore.Txn) client.LensRegistry { - return &LensRegistry{w.cmd.withTxn(tx)} -} - func (w *LensRegistry) SetMigration(ctx context.Context, collectionID uint32, config model.Lens) error { args := []string{"client", "schema", "migration", "set-registry"} diff --git 
a/tests/clients/http/wrapper.go b/tests/clients/http/wrapper.go index b45105a7f7..51911c3321 100644 --- a/tests/clients/http/wrapper.go +++ b/tests/clients/http/wrapper.go @@ -17,6 +17,7 @@ import ( blockstore "github.com/ipfs/boxo/blockstore" "github.com/lens-vm/lens/host-go/config/model" "github.com/libp2p/go-libp2p/core/peer" + "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/client" @@ -97,6 +98,13 @@ func (w *Wrapper) AddSchema(ctx context.Context, schema string) ([]client.Collec return w.client.AddSchema(ctx, schema) } +func (w *Wrapper) AddPolicy( + ctx context.Context, + policy string, +) (client.AddPolicyResult, error) { + return w.client.AddPolicy(ctx, policy) +} + func (w *Wrapper) PatchSchema( ctx context.Context, patch string, @@ -106,6 +114,13 @@ func (w *Wrapper) PatchSchema( return w.client.PatchSchema(ctx, patch, migration, setAsDefaultVersion) } +func (w *Wrapper) PatchCollection( + ctx context.Context, + patch string, +) error { + return w.client.PatchCollection(ctx, patch) +} + func (w *Wrapper) SetActiveSchemaVersion(ctx context.Context, schemaVersionID string) error { return w.client.SetActiveSchemaVersion(ctx, schemaVersionID) } @@ -153,7 +168,10 @@ func (w *Wrapper) GetAllIndexes(ctx context.Context) (map[client.CollectionName] return w.client.GetAllIndexes(ctx) } -func (w *Wrapper) ExecRequest(ctx context.Context, query string) *client.RequestResult { +func (w *Wrapper) ExecRequest( + ctx context.Context, + query string, +) *client.RequestResult { return w.client.ExecRequest(ctx, query) } @@ -181,10 +199,6 @@ func (w *Wrapper) NewConcurrentTxn(ctx context.Context, readOnly bool) (datastor return &TxWrapper{server, client}, nil } -func (w *Wrapper) WithTxn(tx datastore.Txn) client.Store { - return w.client.WithTxn(tx) -} - func (w *Wrapper) Root() datastore.RootStore { return w.node.Root() } diff --git a/tests/gen/cli/gendocs.go b/tests/gen/cli/gendocs.go index 9123bf0c2b..58152c721a 100644 --- a/tests/gen/cli/gendocs.go +++ b/tests/gen/cli/gendocs.go @@ -121,7 +121,7 @@ func saveBatchToCollections( for colName, colDocs := range colDocsMap { for _, col := range collections { if col.Description().Name.Value() == colName { - err := col.CreateMany(context.Background(), colDocs) + err := col.CreateMany(ctx, colDocs) if err != nil { return err } diff --git a/tests/gen/cli/util_test.go b/tests/gen/cli/util_test.go index 10bd98ca99..58b2db083b 100644 --- a/tests/gen/cli/util_test.go +++ b/tests/gen/cli/util_test.go @@ -16,6 +16,7 @@ import ( "testing" badger "github.com/sourcenetwork/badger/v4" + "github.com/sourcenetwork/corelog" "github.com/stretchr/testify/require" "github.com/sourcenetwork/defradb/client" @@ -23,10 +24,9 @@ import ( "github.com/sourcenetwork/defradb/db" "github.com/sourcenetwork/defradb/errors" httpapi "github.com/sourcenetwork/defradb/http" - "github.com/sourcenetwork/defradb/logging" ) -var log = logging.MustNewLogger("cli") +var log = corelog.NewLogger("cli") type defraInstance struct { db client.DB @@ -39,9 +39,9 @@ func (di *defraInstance) close(ctx context.Context) { } func start(ctx context.Context) (*defraInstance, error) { - log.FeedbackInfo(ctx, "Starting DefraDB service...") + log.InfoContext(ctx, "Starting DefraDB service...") - log.FeedbackInfo(ctx, "Building new memory store") + log.InfoContext(ctx, "Building new memory store") opts := badgerds.Options{Options: badger.DefaultOptions("").WithInMemory(true)} rootstore, err := badgerds.NewDatastore("", &opts) @@ -51,7 +51,7 @@ func start(ctx context.Context) 
(*defraInstance, error) { db, err := db.NewDB(ctx, rootstore) if err != nil { - return nil, errors.Wrap("failed to create database", err) + return nil, errors.Wrap("failed to create a database", err) } handler, err := httpapi.NewHandler(db) diff --git a/tests/gen/gen_auto.go b/tests/gen/gen_auto.go index c837b822a9..487558e934 100644 --- a/tests/gen/gen_auto.go +++ b/tests/gen/gen_auto.go @@ -119,9 +119,9 @@ func (g *randomDocGenerator) getMaxTotalDemand() int { } // getNextPrimaryDocID returns the docID of the next primary document to be used as a relation. -func (g *randomDocGenerator) getNextPrimaryDocID(secondaryType string, field *client.SchemaFieldDescription) string { +func (g *randomDocGenerator) getNextPrimaryDocID(secondaryType string, field *client.FieldDefinition) string { ind := g.configurator.usageCounter.getNextTypeIndForField(secondaryType, field) - return g.generatedDocs[field.Schema][ind].docID + return g.generatedDocs[field.Kind.Underlying()][ind].docID } func (g *randomDocGenerator) generateRandomDocs(order []string) error { @@ -134,12 +134,12 @@ func (g *randomDocGenerator) generateRandomDocs(order []string) error { totalDemand := currentTypeDemand.getAverage() for i := 0; i < totalDemand; i++ { newDoc := make(map[string]any) - for _, field := range typeDef.Schema.Fields { + for _, field := range typeDef.GetFields() { if field.Name == request.DocIDFieldName { continue } if field.IsRelation() { - if field.IsPrimaryRelation { + if field.IsPrimaryRelation && field.Kind.IsObject() { if strings.HasSuffix(field.Name, request.RelatedObjectID) { newDoc[field.Name] = g.getNextPrimaryDocID(typeName, &field) } else { @@ -151,7 +151,7 @@ func (g *randomDocGenerator) generateRandomDocs(order []string) error { newDoc[field.Name] = g.generateRandomValue(typeName, field.Kind, fieldConf) } } - doc, err := client.NewDocFromMap(newDoc, typeDef.Schema) + doc, err := client.NewDocFromMap(newDoc, typeDef) if err != nil { return err } @@ -221,15 +221,12 @@ func validateDefinitions(definitions []client.CollectionDefinition) error { if def.Description.Name.Value() != def.Schema.Name { return NewErrIncompleteColDefinition("description name and schema name do not match") } - for _, field := range def.Schema.Fields { + for _, field := range def.GetFields() { if field.Name == "" { return NewErrIncompleteColDefinition("field name is empty") } if field.Kind.IsObject() { - if field.Schema == "" { - return NewErrIncompleteColDefinition("field schema is empty") - } - fieldRefs = append(fieldRefs, field.Schema) + fieldRefs = append(fieldRefs, field.Kind.Underlying()) } } colNames[def.Description.Name.Value()] = struct{}{} diff --git a/tests/gen/gen_auto_config.go b/tests/gen/gen_auto_config.go index eab85dd318..b69dfb0b98 100644 --- a/tests/gen/gen_auto_config.go +++ b/tests/gen/gen_auto_config.go @@ -58,11 +58,12 @@ func validateConfig(types map[string]client.CollectionDefinition, configsMap con return newNotDefinedTypeErr(typeName) } for fieldName, fieldConfig := range typeConfigs { - fieldDef, hasField := typeDef.Schema.GetFieldByName(fieldName) + fieldDef, hasField := typeDef.GetFieldByName(fieldName) if !hasField { return NewErrInvalidConfiguration("field " + fieldName + " is not defined in the schema for type " + typeName) } + err := checkAndValidateMinMax(&fieldDef, &fieldConfig) if err != nil { return err @@ -82,7 +83,7 @@ func validateConfig(types map[string]client.CollectionDefinition, configsMap con return nil } -func checkAndValidateMinMax(field *client.SchemaFieldDescription, conf 
*genConfig) error { +func checkAndValidateMinMax(field *client.FieldDefinition, conf *genConfig) error { _, hasMin := conf.props["min"] if hasMin { var err error @@ -100,7 +101,7 @@ func checkAndValidateMinMax(field *client.SchemaFieldDescription, conf *genConfi return nil } -func checkAndValidateLen(field *client.SchemaFieldDescription, conf *genConfig) error { +func checkAndValidateLen(field *client.FieldDefinition, conf *genConfig) error { lenConf, hasLen := conf.props["len"] if hasLen { if field.Kind != client.FieldKind_NILLABLE_STRING { @@ -117,7 +118,7 @@ func checkAndValidateLen(field *client.SchemaFieldDescription, conf *genConfig) return nil } -func checkAndValidateRatio(field *client.SchemaFieldDescription, conf *genConfig) error { +func checkAndValidateRatio(field *client.FieldDefinition, conf *genConfig) error { ratioConf, hasRatio := conf.props["ratio"] if hasRatio { if field.Kind != client.FieldKind_NILLABLE_BOOL { diff --git a/tests/gen/gen_auto_configurator.go b/tests/gen/gen_auto_configurator.go index b4746ae437..ec8c1ea881 100644 --- a/tests/gen/gen_auto_configurator.go +++ b/tests/gen/gen_auto_configurator.go @@ -65,10 +65,10 @@ func newTypeUsageCounter(random *rand.Rand) typeUsageCounters { // addRelationUsage adds a relation usage tracker for a foreign field. func (c *typeUsageCounters) addRelationUsage( secondaryType string, - field client.SchemaFieldDescription, + field client.FieldDefinition, minPerDoc, maxPerDoc, numDocs int, ) { - primaryType := field.Schema + primaryType := field.Kind.Underlying() if _, ok := c.m[primaryType]; !ok { c.m[primaryType] = make(map[string]map[string]*relationUsage) } @@ -81,8 +81,8 @@ func (c *typeUsageCounters) addRelationUsage( } // getNextTypeIndForField returns the next index to be used for a foreign field. 
-func (c *typeUsageCounters) getNextTypeIndForField(secondaryType string, field *client.SchemaFieldDescription) int { - current := c.m[field.Schema][secondaryType][field.Name] +func (c *typeUsageCounters) getNextTypeIndForField(secondaryType string, field *client.FieldDefinition) int { + current := c.m[field.Kind.Underlying()][secondaryType][field.Name] return current.useNextDocIDIndex() } @@ -272,8 +272,8 @@ func (g *docsGenConfigurator) getDemandForPrimaryType( primaryGraph map[string][]string, ) (typeDemand, error) { primaryTypeDef := g.types[primaryType] - for _, field := range primaryTypeDef.Schema.Fields { - if field.Kind.IsObject() && field.Schema == secondaryType { + for _, field := range primaryTypeDef.GetFields() { + if field.Kind.IsObject() && field.Kind.Underlying() == secondaryType { primaryDemand := typeDemand{min: secondaryDemand.min, max: secondaryDemand.max} minPerDoc, maxPerDoc := 1, 1 @@ -312,7 +312,7 @@ func (g *docsGenConfigurator) getDemandForPrimaryType( return typeDemand{}, NewErrCanNotSupplyTypeDemand(primaryType) } g.docsDemand[primaryType] = primaryDemand - g.initRelationUsages(field.Schema, primaryType, minPerDoc, maxPerDoc) + g.initRelationUsages(field.Kind.Underlying(), primaryType, minPerDoc, maxPerDoc) } } return secondaryDemand, nil @@ -338,13 +338,13 @@ func (g *docsGenConfigurator) calculateDemandForSecondaryTypes( primaryGraph map[string][]string, ) error { typeDef := g.types[typeName] - for _, field := range typeDef.Schema.Fields { + for _, field := range typeDef.GetFields() { if field.Kind.IsObject() && !field.IsPrimaryRelation { primaryDocDemand := g.docsDemand[typeName] newSecDemand := typeDemand{min: primaryDocDemand.min, max: primaryDocDemand.max} minPerDoc, maxPerDoc := 1, 1 - curSecDemand, hasSecDemand := g.docsDemand[field.Schema] + curSecDemand, hasSecDemand := g.docsDemand[field.Kind.Underlying()] if field.Kind.IsArray() { fieldConf := g.config.ForField(typeName, field.Name) @@ -368,21 +368,26 @@ func (g *docsGenConfigurator) calculateDemandForSecondaryTypes( if hasSecDemand { if curSecDemand.min < newSecDemand.min || curSecDemand.max > newSecDemand.max { - return NewErrCanNotSupplyTypeDemand(field.Schema) + return NewErrCanNotSupplyTypeDemand(field.Kind.Underlying()) } } else { - g.docsDemand[field.Schema] = newSecDemand + g.docsDemand[field.Kind.Underlying()] = newSecDemand } - g.initRelationUsages(field.Schema, typeName, minPerDoc, maxPerDoc) + g.initRelationUsages(field.Kind.Underlying(), typeName, minPerDoc, maxPerDoc) - err := g.calculateDemandForSecondaryTypes(field.Schema, primaryGraph) + err := g.calculateDemandForSecondaryTypes(field.Kind.Underlying(), primaryGraph) if err != nil { return err } - for _, primaryTypeName := range primaryGraph[field.Schema] { + for _, primaryTypeName := range primaryGraph[field.Kind.Underlying()] { if _, ok := g.docsDemand[primaryTypeName]; !ok { - primaryDemand, err := g.getDemandForPrimaryType(primaryTypeName, field.Schema, newSecDemand, primaryGraph) + primaryDemand, err := g.getDemandForPrimaryType( + primaryTypeName, + field.Kind.Underlying(), + newSecDemand, + primaryGraph, + ) if err != nil { return err } @@ -396,8 +401,8 @@ func (g *docsGenConfigurator) calculateDemandForSecondaryTypes( func (g *docsGenConfigurator) initRelationUsages(secondaryType, primaryType string, minPerDoc, maxPerDoc int) { secondaryTypeDef := g.types[secondaryType] - for _, secondaryTypeField := range secondaryTypeDef.Schema.Fields { - if secondaryTypeField.Schema == primaryType { + for _, secondaryTypeField := range 
secondaryTypeDef.GetFields() { + if secondaryTypeField.Kind.Underlying() == primaryType { g.usageCounter.addRelationUsage(secondaryType, secondaryTypeField, minPerDoc, maxPerDoc, g.docsDemand[primaryType].getAverage()) } @@ -417,12 +422,12 @@ func getRelationGraph(types map[string]client.CollectionDefinition) map[string][ } for typeName, typeDef := range types { - for _, field := range typeDef.Schema.Fields { + for _, field := range typeDef.GetFields() { if field.Kind.IsObject() { if field.IsPrimaryRelation { - primaryGraph[typeName] = appendUnique(primaryGraph[typeName], field.Schema) + primaryGraph[typeName] = appendUnique(primaryGraph[typeName], field.Kind.Underlying()) } else { - primaryGraph[field.Schema] = appendUnique(primaryGraph[field.Schema], typeName) + primaryGraph[field.Kind.Underlying()] = appendUnique(primaryGraph[field.Kind.Underlying()], typeName) } } } diff --git a/tests/gen/gen_auto_test.go b/tests/gen/gen_auto_test.go index 0ddca543f2..02cb45331b 100644 --- a/tests/gen/gen_auto_test.go +++ b/tests/gen/gen_auto_test.go @@ -338,7 +338,7 @@ func TestAutoGenerateFromSchema_RelationOneToOne(t *testing.T) { } type Device { - owner: User + owner: User @primary model: String }` @@ -792,7 +792,7 @@ func TestAutoGenerateFromSchema_ConfigThatCanNotBySupplied(t *testing.T) { type Device { model: String - owner: User + owner: User @primary }`, options: []Option{WithTypeDemand("User", 10), WithTypeDemand("Device", 30)}, }, @@ -801,12 +801,12 @@ func TestAutoGenerateFromSchema_ConfigThatCanNotBySupplied(t *testing.T) { type User { name: String device: Device - orders: Order + orders: Order @primary } type Device { model: String - owner: User + owner: User @primary } type Order { @@ -1203,6 +1203,15 @@ func TestAutoGenerate_IfCollectionDefinitionIsIncomplete_ReturnError(t *testing. Description: client.CollectionDescription{ Name: immutable.Some("User"), ID: 0, + Fields: []client.CollectionFieldDescription{ + { + Name: "name", + }, + { + Name: "device", + Kind: immutable.Some[client.FieldKind](client.ObjectKind("Device")), + }, + }, }, Schema: client.SchemaDescription{ Name: "User", @@ -1211,11 +1220,6 @@ func TestAutoGenerate_IfCollectionDefinitionIsIncomplete_ReturnError(t *testing. Name: "name", Kind: client.FieldKind_NILLABLE_INT, }, - { - Name: "device", - Kind: client.FieldKind_FOREIGN_OBJECT, - Schema: "Device", - }, }, }, }, @@ -1223,6 +1227,15 @@ func TestAutoGenerate_IfCollectionDefinitionIsIncomplete_ReturnError(t *testing. Description: client.CollectionDescription{ Name: immutable.Some("Device"), ID: 1, + Fields: []client.CollectionFieldDescription{ + { + Name: "model", + }, + { + Name: "owner", + Kind: immutable.Some[client.FieldKind](client.ObjectKind("User")), + }, + }, }, Schema: client.SchemaDescription{ Name: "Device", @@ -1232,10 +1245,8 @@ func TestAutoGenerate_IfCollectionDefinitionIsIncomplete_ReturnError(t *testing. Kind: client.FieldKind_NILLABLE_STRING, }, { - Name: "owner", - Kind: client.FieldKind_FOREIGN_OBJECT, - Schema: "User", - IsPrimaryRelation: true, + Name: "owner", + Kind: client.ObjectKind("User"), }, }, }, @@ -1269,6 +1280,7 @@ func TestAutoGenerate_IfCollectionDefinitionIsIncomplete_ReturnError(t *testing. name: "field name is empty", changeDefs: func(defs []client.CollectionDefinition) { defs[0].Schema.Fields[0].Name = "" + defs[0].Description.Fields[0].Name = "" }, }, { @@ -1283,18 +1295,6 @@ func TestAutoGenerate_IfCollectionDefinitionIsIncomplete_ReturnError(t *testing. 
defs[1].Description.ID = 0
 			},
 		},
-		{
-			name: "relation field is missing schema name",
-			changeDefs: func(defs []client.CollectionDefinition) {
-				defs[1].Schema.Fields[1].Schema = ""
-			},
-		},
-		{
-			name: "relation field references unknown schema",
-			changeDefs: func(defs []client.CollectionDefinition) {
-				defs[1].Schema.Fields[1].Schema = "Unknown"
-			},
-		},
 	}
 
 	for _, tc := range testCases {
@@ -1318,6 +1318,22 @@ func TestAutoGenerate_IfColDefinitionsAreValid_ShouldGenerate(t *testing.T) {
 			Description: client.CollectionDescription{
 				Name: immutable.Some("User"),
 				ID:   0,
+				Fields: []client.CollectionFieldDescription{
+					{
+						Name: "name",
+					},
+					{
+						Name: "age",
+					},
+					{
+						Name: "rating",
+					},
+					{
+						Name:         "devices",
+						Kind:         immutable.Some[client.FieldKind](client.ObjectArrayKind("Device")),
+						RelationName: immutable.Some("Device_owner"),
+					},
+				},
 			},
 			Schema: client.SchemaDescription{
 				Name: "User",
@@ -1334,12 +1350,6 @@ func TestAutoGenerate_IfColDefinitionsAreValid_ShouldGenerate(t *testing.T) {
 						Name: "rating",
 						Kind: client.FieldKind_NILLABLE_FLOAT,
 					},
-					{
-						Name:         "devices",
-						Kind:         client.FieldKind_FOREIGN_OBJECT_ARRAY,
-						Schema:       "Device",
-						RelationName: "Device_owner",
-					},
 				},
 			},
 		},
@@ -1347,6 +1357,20 @@ func TestAutoGenerate_IfColDefinitionsAreValid_ShouldGenerate(t *testing.T) {
 			Description: client.CollectionDescription{
 				Name: immutable.Some("Device"),
 				ID:   1,
+				Fields: []client.CollectionFieldDescription{
+					{
+						Name: "model",
+					},
+					{
+						Name:         "owner",
+						Kind:         immutable.Some[client.FieldKind](client.ObjectKind("User")),
+						RelationName: immutable.Some("Device_owner"),
+					},
+					{
+						Name:         "owner_id",
+						RelationName: immutable.Some("Device_owner"),
+					},
+				},
 			},
 			Schema: client.SchemaDescription{
 				Name: "Device",
@@ -1356,10 +1380,14 @@ func TestAutoGenerate_IfColDefinitionsAreValid_ShouldGenerate(t *testing.T) {
 						Name: "model",
 						Kind: client.FieldKind_NILLABLE_STRING,
 					},
 					{
-						Name:         "owner_id",
-						Kind:         client.FieldKind_DocID,
-						RelationName: "Device_owner",
-						Schema:       "User",
+						Name: "owner",
+						Kind: client.ObjectKind("User"),
+						Typ:  client.LWW_REGISTER,
+					},
+					{
+						Name: "owner_id",
+						Kind: client.FieldKind_DocID,
+						Typ:  client.LWW_REGISTER,
 					},
 				},
 			},
diff --git a/tests/integration/acp.go b/tests/integration/acp.go
new file mode 100644
index 0000000000..eb9bdc8fbe
--- /dev/null
+++ b/tests/integration/acp.go
@@ -0,0 +1,66 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package tests
+
+import (
+	"github.com/sourcenetwork/immutable"
+	"github.com/stretchr/testify/require"
+
+	acpIdentity "github.com/sourcenetwork/defradb/acp/identity"
+	"github.com/sourcenetwork/defradb/db"
+)
+
+// AddPolicy will attempt to add the given policy using DefraDB's ACP system.
+type AddPolicy struct {
+	// NodeID may hold the ID (index) of the node we want to add the policy to.
+	//
+	// If a value is not provided the policy will be added to all nodes.
+	NodeID immutable.Option[int]
+
+	// The raw policy string.
+	Policy string
+
+	// The policy creator identity, i.e. the actor creating the policy.
+	Identity string
+
+	// The expected policyID generated based on the Policy loaded into the ACP system.
+	ExpectedPolicyID string
+
+	// Any error expected from the action. Optional.
+	//
+	// String can be a partial match, and the test will pass if an error is returned that
+	// contains this string.
+	ExpectedError string
+}
+
+// addPolicyACP will attempt to add the given policy using DefraDB's ACP system.
+func addPolicyACP(
+	s *state,
+	action AddPolicy,
+) {
+	// If we expect an error, then ExpectedPolicyID should be empty.
+	if action.ExpectedError != "" && action.ExpectedPolicyID != "" {
+		require.Fail(s.t, "Expected error should not have an expected policyID with it.", s.testCase.Description)
+	}
+
+	for _, node := range getNodes(action.NodeID, s.nodes) {
+		ctx := db.SetContextIdentity(s.ctx, acpIdentity.New(action.Identity))
+		policyResult, err := node.AddPolicy(ctx, action.Policy)
+
+		if err == nil {
+			require.Equal(s.t, action.ExpectedError, "")
+			require.Equal(s.t, action.ExpectedPolicyID, policyResult.PolicyID)
+		}
+
+		expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
+		assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
+	}
+}
diff --git a/tests/integration/acp/README.md b/tests/integration/acp/README.md
new file mode 100644
index 0000000000..1de6847ce4
--- /dev/null
+++ b/tests/integration/acp/README.md
@@ -0,0 +1,20 @@
+## More Information on ACP test directories
+
+
+1) `./defradb/tests/integration/acp/add_policy`
+   - This directory tests ONLY the `Adding of a Policy` through DefraDB.
+   - Does NOT assert the schema.
+   - Does NOT test DPI validation.
+
+2) `./defradb/tests/integration/acp/schema/add_dpi`
+   - This directory tests the loading/adding of a schema that has `@policy(id, resource)`
+     specified. The tests ensure that only a schema linking to a valid DPI policy is
+     accepted. Naturally, these tests will also be `Adding a Policy` through DefraDB,
+     like in (1), before actually adding the schema. If a schema has a policy specified
+     that doesn't exist (or wasn't added yet), that schema WILL/MUST be rejected
+     in these tests.
+   - The tests assert the schema afterwards to ensure rejection/acceptance.
+   - Tests DPI validation.
+
+
+### Learn more about [DPI Rules](/acp/README.md)
diff --git a/tests/integration/acp/add_policy/README.md b/tests/integration/acp/add_policy/README.md
new file mode 100644
index 0000000000..30b88492b9
--- /dev/null
+++ b/tests/integration/acp/add_policy/README.md
@@ -0,0 +1,20 @@
+## This directory tests the `Adding of a Policy` through DefraDB.
+
+### These are NOT DefraDB Policy Interface (DPI) Tests
+There are certain requirements for a DPI. A resource must be a valid DPI to link to a collection.
+However, it's important to note that DefraDB does allow adding policies whose resources are not
+DPI compliant, as long as sourcehub (the ACP system) deems them valid. There are various reasons
+for this, mostly because DefraDB is a tool that can be used to upload policies to sourcehub that
+are not only for use with collections / schema. Nonetheless, we still need a way to validate that
+the resource specified on the schema being added is a DPI-compliant resource on an already
+registered policy. Therefore, when a schema is added that has a policyID and resource defined
+using the `@policy` directive, the validation occurs during the adding of the schema. In other
+words, we do not allow a non-DPI-compliant resource to be specified on a schema; if one is, the
+schema is rejected.
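+
+For reference, a minimal resource that satisfies these DPI requirements looks like the policy
+below. This is the same policy registered by the basic tests in this directory, and `users` is
+simply the resource name those tests happen to use; per the policy's own description, this is the
+minimum shape DPI validation expects (an `owner` relation plus `read` and `write` permissions):
+
+```yaml
+description: a basic policy that satisfies minimum DPI requirements
+
+actor:
+  name: actor
+
+resources:
+  users:
+    permissions:
+      read:
+        expr: owner
+      write:
+        expr: owner
+
+    relations:
+      owner:
+        types:
+          - actor
+```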
+
+### Non-DPI Compliant Policies Documented In Tests
+These test files document some cases where DefraDB would upload policies that aren't DPI compliant
+but are sourcehub compatible; it might be worthwhile to look at the documented tests and notes there:
+- `./with_no_perms_test.go`
+- `./with_no_resources_test.go`
+- `./with_permissionless_owner_test.go`
diff --git a/tests/integration/acp/add_policy/basic_test.go b/tests/integration/acp/add_policy/basic_test.go
new file mode 100644
index 0000000000..fdf53f02cc
--- /dev/null
+++ b/tests/integration/acp/add_policy/basic_test.go
@@ -0,0 +1,100 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package test_acp_add_policy
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestACP_AddPolicy_BasicYAML_ValidPolicyID(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, adding basic policy in YAML format",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Identity: actor1Identity,
+
+				Policy: `
+                    description: a basic policy that satisfies minimum DPI requirements
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: owner
+                          write:
+                            expr: owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+
+                `,
+
+				ExpectedPolicyID: "dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_AddPolicy_BasicJSON_ValidPolicyID(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, adding basic policy in JSON format",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Identity: actor1Identity,
+
+				Policy: `
+                    {
+                      "description": "a basic policy that satisfies minimum DPI requirements",
+                      "resources": {
+                        "users": {
+                          "permissions": {
+                            "read": {
+                              "expr": "owner"
+                            },
+                            "write": {
+                              "expr": "owner"
+                            }
+                          },
+                          "relations": {
+                            "owner": {
+                              "types": [
+                                "actor"
+                              ]
+                            }
+                          }
+                        }
+                      },
+                      "actor": {
+                        "name": "actor"
+                      }
+                    }
+                `,
+
+				ExpectedPolicyID: "dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/add_policy/fixture.go b/tests/integration/acp/add_policy/fixture.go
new file mode 100644
index 0000000000..97ae5e6ff6
--- /dev/null
+++ b/tests/integration/acp/add_policy/fixture.go
@@ -0,0 +1,18 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+ +package test_acp_add_policy + +import ( + acpUtils "github.com/sourcenetwork/defradb/tests/integration/acp" +) + +var actor1Identity = acpUtils.Actor1Identity +var actor2Identity = acpUtils.Actor2Identity diff --git a/tests/integration/acp/add_policy/with_empty_args_test.go b/tests/integration/acp/add_policy/with_empty_args_test.go new file mode 100644 index 0000000000..dc530d95b0 --- /dev/null +++ b/tests/integration/acp/add_policy/with_empty_args_test.go @@ -0,0 +1,93 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_add_policy + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddPolicy_EmptyPolicyData_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, adding empty policy, return error", + + Actions: []any{ + testUtils.AddPolicy{ + Identity: actor1Identity, + + Policy: "", + + ExpectedError: "policy data can not be empty", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddPolicy_EmptyPolicyCreator_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, adding policy, with empty creator, return error", + + Actions: []any{ + testUtils.AddPolicy{ + Identity: "", + + Policy: ` + description: a basic policy that satisfies minimum DPI requirements + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + write: + expr: owner + + relations: + owner: + types: + - actor + + `, + + ExpectedError: "policy creator can not be empty", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddPolicy_EmptyCreatorAndPolicyArgs_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, adding policy, with empty policy and empty creator, return error", + + Actions: []any{ + testUtils.AddPolicy{ + Identity: "", + + Policy: "", + + ExpectedError: "policy creator can not be empty", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/add_policy/with_extra_perms_and_relations_test.go b/tests/integration/acp/add_policy/with_extra_perms_and_relations_test.go new file mode 100644 index 0000000000..cd84e3d910 --- /dev/null +++ b/tests/integration/acp/add_policy/with_extra_perms_and_relations_test.go @@ -0,0 +1,62 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package test_acp_add_policy + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddPolicy_ExtraPermissionsAndExtraRelations_ValidPolicyID(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add policy, extra permissions and relations, still valid", + + Actions: []any{ + testUtils.AddPolicy{ + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + write: + expr: owner + read: + expr: owner + reader + extra: + expr: joker + + relations: + owner: + types: + - actor + reader: + types: + - actor + joker: + types: + - actor + `, + + ExpectedPolicyID: "ecfeeebd1b65e6a21b2f1b57006176bcbc6a37ef238f27c7034953f46fe04674", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/add_policy/with_extra_perms_test.go b/tests/integration/acp/add_policy/with_extra_perms_test.go new file mode 100644 index 0000000000..8c13555c8d --- /dev/null +++ b/tests/integration/acp/add_policy/with_extra_perms_test.go @@ -0,0 +1,95 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_add_policy + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddPolicy_ExtraPermissions_ValidPolicyID(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add policy, extra permissions, still valid", + + Actions: []any{ + testUtils.AddPolicy{ + Identity: actor1Identity, + + Policy: ` + description: a policy + + resources: + users: + permissions: + read: + expr: owner + write: + expr: owner + extra: + expr: owner + + relations: + owner: + types: + - actor + + actor: + name: actor + `, + + ExpectedPolicyID: "9d518bb2d5aceb2c8f9b12b909eecd50276c1bd0250069875f265166e6030bb5", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddPolicy_ExtraDuplicatePermissions_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add policy, extra duplicate permissions, return error", + + Actions: []any{ + testUtils.AddPolicy{ + Identity: actor1Identity, + + Policy: ` + description: a policy + + resources: + users: + permissions: + read: + expr: owner + write: + expr: owner + write: + expr: owner + + relations: + owner: + types: + - actor + + actor: + name: actor + `, + + ExpectedError: "key \"write\" already set in map", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/add_policy/with_extra_relations_test.go b/tests/integration/acp/add_policy/with_extra_relations_test.go new file mode 100644 index 0000000000..d3e4308c24 --- /dev/null +++ b/tests/integration/acp/add_policy/with_extra_relations_test.go @@ -0,0 +1,107 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package test_acp_add_policy + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddPolicy_ExtraRelations_ValidPolicyID(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add policy, extra relations, still valid", + + Actions: []any{ + testUtils.AddPolicy{ + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + write: + expr: owner + read: + expr: owner + reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + joker: + types: + - actor + `, + + ExpectedPolicyID: "450c47aa47b7b07820f99e5cb38170dc108a2f12b137946e6b47d0c0a73b607f", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddPolicy_ExtraDuplicateRelations_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add policy, extra duplicate relations permissions, return error", + + Actions: []any{ + testUtils.AddPolicy{ + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + write: + expr: owner + read: + expr: owner + reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + joker: + types: + - actor + + joker: + types: + - actor + `, + + ExpectedError: "key \"joker\" already set in map", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/add_policy/with_invalid_creator_arg_test.go b/tests/integration/acp/add_policy/with_invalid_creator_arg_test.go new file mode 100644 index 0000000000..2e56670add --- /dev/null +++ b/tests/integration/acp/add_policy/with_invalid_creator_arg_test.go @@ -0,0 +1,75 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package test_acp_add_policy + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddPolicy_InvalidCreatorIdentityWithValidPolicy_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, adding policy, with invalid creator, with valid policy, return error", + + Actions: []any{ + testUtils.AddPolicy{ + Identity: "invalid", + + Policy: ` + description: a basic policy that satisfies minimum DPI requirements + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + write: + expr: owner + + relations: + owner: + types: + - actor + + `, + + ExpectedError: "policy creator can not be empty", + }, + }, + } + + //TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2357 + testUtils.AssertPanic(t, func() { testUtils.ExecuteTestCase(t, test) }) +} + +func TestACP_AddPolicy_InvalidCreatorIdentityWithEmptyPolicy_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, adding policy, with invalid creator, with empty policy, return error", + + Actions: []any{ + testUtils.AddPolicy{ + Identity: "invalid", + + Policy: "", + + ExpectedError: "policy data can not be empty", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/add_policy/with_invalid_relations_test.go b/tests/integration/acp/add_policy/with_invalid_relations_test.go new file mode 100644 index 0000000000..37945509a5 --- /dev/null +++ b/tests/integration/acp/add_policy/with_invalid_relations_test.go @@ -0,0 +1,83 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package test_acp_add_policy + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddPolicy_NoRelations_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add policy, no relations, should return error", + + Actions: []any{ + testUtils.AddPolicy{ + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + write: + expr: owner + read: + expr: owner + reader + + relations: + `, + + ExpectedError: "resource users: resource missing owner relation: invalid policy", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddPolicy_NoRelationsLabel_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add policy, no relations label, should return error", + + Actions: []any{ + testUtils.AddPolicy{ + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + write: + expr: owner + read: + expr: owner + reader + `, + + ExpectedError: "resource users: resource missing owner relation: invalid policy", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/add_policy/with_invalid_required_relation_test.go b/tests/integration/acp/add_policy/with_invalid_required_relation_test.go new file mode 100644 index 0000000000..d8982703cc --- /dev/null +++ b/tests/integration/acp/add_policy/with_invalid_required_relation_test.go @@ -0,0 +1,94 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+
+package test_acp_add_policy
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestACP_AddPolicy_MissingRequiredOwnerRelation_Error(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy, missing required owner relation, should return error",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Identity: actor1Identity,
+
+				Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          write:
+                            expr: reader
+                          read:
+                            expr: reader
+
+                        relations:
+                          reader:
+                            types:
+                              - actor
+                `,
+
+				ExpectedError: "resource users: resource missing owner relation: invalid policy",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_AddPolicy_DuplicateOwnerRelation_Error(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy, duplicate required owner relations, return error",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Identity: actor1Identity,
+
+				Policy: `
+                    description: a policy
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: owner
+                          write:
+                            expr: owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          owner:
+                            types:
+                              - actor
+
+                    actor:
+                      name: actor
+                `,
+
+				ExpectedError: "key \"owner\" already set in map",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/add_policy/with_invalid_resource_test.go b/tests/integration/acp/add_policy/with_invalid_resource_test.go
new file mode 100644
index 0000000000..2fc311102d
--- /dev/null
+++ b/tests/integration/acp/add_policy/with_invalid_resource_test.go
@@ -0,0 +1,44 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package test_acp_add_policy
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestACP_AddPolicy_OneResourceThatIsEmpty_Error(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy, one resource that is empty, should return error",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Identity: actor1Identity,
+
+				Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                `,
+
+				ExpectedError: "resource users: resource missing owner relation: invalid policy",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/add_policy/with_managed_relation_test.go b/tests/integration/acp/add_policy/with_managed_relation_test.go
new file mode 100644
index 0000000000..d80c5b1c05
--- /dev/null
+++ b/tests/integration/acp/add_policy/with_managed_relation_test.go
@@ -0,0 +1,61 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+ +package test_acp_add_policy + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddPolicy_WithRelationManagingOtherRelation_ValidPolicyID(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, where a relation is managing another relation, valid policy id", + Actions: []any{ + testUtils.AddPolicy{ + Identity: actor1Identity, + + Policy: ` + description: a policy with admin relation managing reader relation + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/add_policy/with_multi_policies_test.go b/tests/integration/acp/add_policy/with_multi_policies_test.go new file mode 100644 index 0000000000..6fbbfb2c39 --- /dev/null +++ b/tests/integration/acp/add_policy/with_multi_policies_test.go @@ -0,0 +1,351 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_add_policy + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddPolicy_AddMultipleDifferentPolicies_ValidPolicyIDs(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add multiple different policies", + + Actions: []any{ + testUtils.AddPolicy{ + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + write: + expr: owner + + relations: + owner: + types: + - actor + + `, + + ExpectedPolicyID: "dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a", + }, + + testUtils.AddPolicy{ + Identity: actor1Identity, + + Policy: ` + description: another policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddPolicy_AddMultipleDifferentPoliciesInDifferentFmts_ValidPolicyIDs(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add multiple different policies in different formats", + + Actions: []any{ + testUtils.AddPolicy{ + Identity: actor1Identity, + + Policy: ` + { + "description": "a policy", + "actor": { + "name": "actor" + }, + "resources": { + "users": { + "permissions": { + "read": { + "expr": "owner" + }, + "write": { + "expr": "owner" + } + }, + "relations": { + "owner": { + "types": [ + "actor" + ] + } + } + } + } + } + `, + + ExpectedPolicyID: "dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a", + }, + + testUtils.AddPolicy{ + Identity: actor1Identity, + + Policy: ` + description: another policy + + actor: + name: actor + + resources: + users: + permissions: + read: + 
expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddPolicy_AddDuplicatePolicyByOtherCreator_ValidPolicyIDs(t *testing.T) { + const policyUsedByBoth string = ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + write: + expr: owner + + relations: + owner: + types: + - actor + ` + + test := testUtils.TestCase{ + + Description: "Test acp, add duplicate policies by different actors, valid", + + Actions: []any{ + testUtils.AddPolicy{ + Identity: actor1Identity, + + Policy: policyUsedByBoth, + + ExpectedPolicyID: "dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a", + }, + + testUtils.AddPolicy{ + Identity: actor2Identity, + + Policy: policyUsedByBoth, + + ExpectedPolicyID: "551c57323f33decfdc23312e5e1036e3ab85d2414e962814dab9101619dd9ff9", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddPolicy_AddMultipleDuplicatePolicies_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add duplicate policies, error", + + Actions: []any{ + testUtils.AddPolicy{ + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + write: + expr: owner + + relations: + owner: + types: + - actor + + `, + + ExpectedPolicyID: "dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a", + }, + + testUtils.AddPolicy{ + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + write: + expr: owner + + relations: + owner: + types: + - actor + + `, + + ExpectedError: "policy dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a: policy exists", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddPolicy_AddMultipleDuplicatePoliciesDifferentFmts_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add duplicate policies different formats, error", + + Actions: []any{ + testUtils.AddPolicy{ + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + write: + expr: owner + + relations: + owner: + types: + - actor + `, + + ExpectedPolicyID: "dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a", + }, + + testUtils.AddPolicy{ + Identity: actor1Identity, + + Policy: ` + { + "description": "a policy", + "actor": { + "name": "actor" + }, + "resources": { + "users": { + "permissions": { + "read": { + "expr": "owner" + }, + "write": { + "expr": "owner" + } + }, + "relations": { + "owner": { + "types": [ + "actor" + ] + } + } + } + } + } + `, + + ExpectedError: "policy dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a: policy exists", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/add_policy/with_multiple_resources_test.go b/tests/integration/acp/add_policy/with_multiple_resources_test.go new file mode 100644 index 0000000000..6d6c890452 --- /dev/null +++ b/tests/integration/acp/add_policy/with_multiple_resources_test.go @@ -0,0 +1,173 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is 
governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_add_policy + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddPolicy_MultipleResources_ValidID(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add policy, multiple resources, valid ID", + + Actions: []any{ + testUtils.AddPolicy{ + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + write: + expr: owner + read: + expr: owner + reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + books: + permissions: + write: + expr: owner + read: + expr: owner + reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: "cf082c11fa812dddaa5093f0ccae66c2b5294efe0a2b50ffdcbc0185adf6adf1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddPolicy_MultipleResourcesUsingRelationDefinedInOther_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add policy, multiple resources using other's relation, return error", + + Actions: []any{ + testUtils.AddPolicy{ + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + write: + expr: owner + read: + expr: owner + reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + books: + permissions: + write: + expr: owner + read: + expr: owner + reader + + relations: + owner: + types: + - actor + `, + + ExpectedError: "resource books missing relation reader", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddPolicy_SecondResourcesMissingRequiredOwner_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add policy, multiple resources second missing required owner, return error", + + Actions: []any{ + testUtils.AddPolicy{ + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + write: + expr: owner + read: + expr: owner + reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + books: + permissions: + write: + expr: owner + read: + expr: owner + reader + + relations: + reader: + types: + - actor + `, + + ExpectedError: "resource books: resource missing owner relation: invalid policy", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/add_policy/with_no_perms_test.go b/tests/integration/acp/add_policy/with_no_perms_test.go new file mode 100644 index 0000000000..156788ca45 --- /dev/null +++ b/tests/integration/acp/add_policy/with_no_perms_test.go @@ -0,0 +1,163 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+
+package test_acp_add_policy
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+// Note: Even though this file shows that we can load a policy that has no permissions, it is
+// important to know that DPI always has a set of permissions it requires. Therefore, when a
+// schema is loaded and it has a policyID and resource defined on the collection, validation
+// occurs before we accept that schema.
+// In other words, we do not allow a non-DPI compliant policy to be specified on a collection
+// schema; if it is, the schema will be rejected. However, we register the policy with acp even
+// if the policy is not DPI compliant.
+
+func TestACP_AddPolicy_NoPermissionsOnlyOwner_ValidID(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy, no permissions only owner relation",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Identity: actor1Identity,
+
+				Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+
+                `,
+
+				ExpectedPolicyID: "b6edfd9d24a79067a2f5960e1369499ebaf4c5ec6747e2f444f33bf9c3915fcb",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_AddPolicy_NoPermissionsMultiRelations_ValidID(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy, no permissions with multi relations",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Identity: actor1Identity,
+
+				Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+
+                `,
+
+				ExpectedPolicyID: "7eb7448daa631cfe33da3a149f5eea716026f54bf23ce1315c594259382c5c57",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_AddPolicy_NoPermissionsLabelOnlyOwner_ValidID(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy, no permissions label only owner relation",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Identity: actor1Identity,
+
+				Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        relations:
+                          owner:
+                            types:
+                              - actor
+
+                `,
+
+				ExpectedPolicyID: "b6edfd9d24a79067a2f5960e1369499ebaf4c5ec6747e2f444f33bf9c3915fcb",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_AddPolicy_NoPermissionsLabelMultiRelations_ValidID(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy, no permissions label with multi relations",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Identity: actor1Identity,
+
+				Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+
+                `,
+
+				ExpectedPolicyID: "7eb7448daa631cfe33da3a149f5eea716026f54bf23ce1315c594259382c5c57",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/add_policy/with_no_resources_test.go b/tests/integration/acp/add_policy/with_no_resources_test.go
new file mode 100644
index 0000000000..6b4097584a
--- /dev/null
+++ b/tests/integration/acp/add_policy/with_no_resources_test.go
@@ -0,0 +1,92 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package test_acp_add_policy
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+// Even though empty resources make no sense from a DefraDB (DPI) perspective,
+// it is still a valid SourceHub policy for now.
+func TestACP_AddPolicy_NoResource_ValidID(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy, no resource, valid policy",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Identity: actor1Identity,
+
+				Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                `,
+
+				ExpectedPolicyID: "b72d8ec56ffb141922781d2b1b0803404bef57be0eeec98f1662f3017fc2de35",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+// Even though empty resources make no sense from a DefraDB (DPI) perspective,
+// it is still a valid SourceHub policy for now.
+func TestACP_AddPolicy_NoResourceLabel_ValidID(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy, no resource label, valid policy",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Identity: actor1Identity,
+
+				Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+                `,
+
+				ExpectedPolicyID: "b72d8ec56ffb141922781d2b1b0803404bef57be0eeec98f1662f3017fc2de35",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+// Even though empty resources make no sense from a DefraDB (DPI) perspective,
+// it is still a valid SourceHub policy for now.
+func TestACP_AddPolicy_PolicyWithOnlySpace_ValidID(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, adding a policy that has only space",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Identity: actor1Identity,
+
+				Policy: " ",
+
+				ExpectedPolicyID: "b72d8ec56ffb141922781d2b1b0803404bef57be0eeec98f1662f3017fc2de35",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/add_policy/with_perm_expr_test.go b/tests/integration/acp/add_policy/with_perm_expr_test.go
new file mode 100644
index 0000000000..177de98ebe
--- /dev/null
+++ b/tests/integration/acp/add_policy/with_perm_expr_test.go
@@ -0,0 +1,98 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
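+
+// A note on the expected policy IDs asserted below (an observation drawn from these
+// tests, not an authoritative spec): the ID appears to depend on the exact policy text
+// (and, per the duplicate-policy tests, on the creating actor), so even a
+// whitespace-only difference such as "reader - owner" vs "reader-owner" yields a
+// different policy ID.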
+
+package test_acp_add_policy
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestACP_AddPolicy_PermissionExprWithOwnerInTheEndWithMinus_ValidID(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy with permission expr having owner in the end with minus, ValidID",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Identity: actor1Identity,
+
+				Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: reader - owner
+                          write:
+                            expr: owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+                `,
+
+				ExpectedPolicyID: "d74384d99b6732c3a6e0e47c7b75ea19553f643bcca416380530d8ad4e50e529",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+// Note: this test and the one above result in different policy IDs.
+func TestACP_AddPolicy_PermissionExprWithOwnerInTheEndWithMinusNoSpace_ValidID(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy with permission expr having owner in the end with minus no space, ValidID",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Identity: actor1Identity,
+
+				Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: reader-owner
+                          write:
+                            expr: owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+                `,
+
+				ExpectedPolicyID: "f6d5d6d8b0183230fcbdf06cfe14b611f782752d276006ad4622231eeaf60820",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/add_policy/with_perm_invalid_expr_test.go b/tests/integration/acp/add_policy/with_perm_invalid_expr_test.go
new file mode 100644
index 0000000000..7c5033d700
--- /dev/null
+++ b/tests/integration/acp/add_policy/with_perm_invalid_expr_test.go
@@ -0,0 +1,137 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package test_acp_add_policy
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestACP_AddPolicy_EmptyExpressionInPermission_Error(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy with permission having empty expr, error",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Identity: actor1Identity,
+
+				Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr:
+                          write:
+                            expr: owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+                `,
+
+				ExpectedError: "relation read: error parsing: expression needs: term",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_AddPolicy_PermissionExprWithOwnerInTheEndWithIncorrectSymbol_Error(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy with permission expr having owner in the end with incorrect symbol, error",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Identity: actor1Identity,
+
+				Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: reader ^ owner
+                          write:
+                            expr: owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+                `,
+
+				ExpectedError: "error parsing expression reader ^ owner: unknown token:",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_AddPolicy_PermissionExprWithOwnerInTheEndWithIncorrectSymbolNoSpace_Error(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy with permission expr having owner in the end with incorrect symbol with no space, error",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Identity: actor1Identity,
+
+				Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: reader^owner
+                          write:
+                            expr: owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+                `,
+
+				ExpectedError: "error parsing expression reader^owner: unknown token:",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/add_policy/with_permissionless_owner_test.go b/tests/integration/acp/add_policy/with_permissionless_owner_test.go
new file mode 100644
index 0000000000..0fda8a7468
--- /dev/null
+++ b/tests/integration/acp/add_policy/with_permissionless_owner_test.go
@@ -0,0 +1,144 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package test_acp_add_policy
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+// Note: Similar to the one in ./with_no_perms_test.go
+// Even though this file shows that we can load a policy that assigns none of the read/write
+// permissions required for DPI, when a schema is loaded with a policyID and resource defined
+// on the collection, validation occurs before we accept that schema. In other words, we do
+// not allow a non-DPI compliant policy to be specified on a collection schema; if it is, the
+// schema will be rejected. However, we register the policy with acp even if the policy isn't
+// DPI compliant.
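+
+// For reference, a minimal sketch of the permissions a DPI-compliant resource is expected
+// to declare, based on the compliant policies used elsewhere in these tests (an
+// illustration, not an authoritative spec): both a read and a write permission whose
+// expressions include the owner relation, e.g.
+//
+//	permissions:
+//	  read:
+//	    expr: owner + reader
+//	  write:
+//	    expr: owner
+//
+// The policies below intentionally drop owner from one or both expressions.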
+ +func TestACP_AddPolicy_PermissionlessOwnerWrite_ValidID(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add policy with owner having no write permissions, valid ID", + + Actions: []any{ + testUtils.AddPolicy{ + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + write: + expr: reader + read: + expr: owner + reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: "af1ee9ffe8558da8455dc1cfc5897028c16c038a053b4cf740dfcef8032d944a", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddPolicy_PermissionlessOwnerRead_ValidID(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add policy with owner having no read permissions, valid ID", + + Actions: []any{ + testUtils.AddPolicy{ + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + write: + expr: owner + reader + read: + expr: reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: "3ceb4a4be889998496355604b68836bc280dc26dab829af3ec45b63d7767a7f1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddPolicy_PermissionlessOwnerReadWrite_ValidID(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add policy with owner having no read/write permissions, valid ID", + + Actions: []any{ + testUtils.AddPolicy{ + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + write: + expr: reader + read: + expr: owner + reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: "af1ee9ffe8558da8455dc1cfc5897028c16c038a053b4cf740dfcef8032d944a", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/add_policy/with_unused_relations_test.go b/tests/integration/acp/add_policy/with_unused_relations_test.go new file mode 100644 index 0000000000..fbcec10755 --- /dev/null +++ b/tests/integration/acp/add_policy/with_unused_relations_test.go @@ -0,0 +1,58 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package test_acp_add_policy + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddPolicy_UnusedRelation_ValidID(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add policy, unused relation in permissions", + + Actions: []any{ + testUtils.AddPolicy{ + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + + `, + + ExpectedPolicyID: "e1bb7702f653d4f9a0595d2d97c209fc0da8f315be007bd19545599eed41ae42", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/fixture.go b/tests/integration/acp/fixture.go new file mode 100644 index 0000000000..ae05f780a4 --- /dev/null +++ b/tests/integration/acp/fixture.go @@ -0,0 +1,16 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp + +var ( + Actor1Identity = "cosmos1zzg43wdrhmmk89z3pmejwete2kkd4a3vn7w969" + Actor2Identity = "cosmos1x25hhksxhu86r45hqwk28dd70qzux3262hdrll" +) diff --git a/tests/integration/acp/index/create_test.go b/tests/integration/acp/index/create_test.go new file mode 100644 index 0000000000..f2c9b193a7 --- /dev/null +++ b/tests/integration/acp/index/create_test.go @@ -0,0 +1,174 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package test_acp_index + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + acpUtils "github.com/sourcenetwork/defradb/tests/integration/acp" +) + +// This test documents that we don't allow creating indexes on collections that have policy +// until the following is implemented: +// TODO-ACP: ACP <> P2P https://github.com/sourcenetwork/defradb/issues/2365 +func TestACP_IndexCreateWithSeparateRequest_OnCollectionWithPolicy_ReturnError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test acp, with creating new index using separate request on permissioned collection, error", + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: acpUtils.Actor1Identity, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.CreateIndex{ + CollectionID: 0, + + IndexName: "some_index", + + FieldName: "name", + + ExpectedError: "can not create index on a collection with a policy", + }, + + testUtils.Request{ + Request: ` + query { + Users { + name + age + } + }`, + + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +// This test documents that we don't allow creating indexes on collections that have policy +// until the following is implemented: +// TODO-ACP: ACP <> P2P https://github.com/sourcenetwork/defradb/issues/2365 +func TestACP_IndexCreateWithDirective_OnCollectionWithPolicy_ReturnError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test acp, with creating new index using directive on permissioned collection, error", + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: acpUtils.Actor1Identity, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String @index + age: Int + } + `, + + ExpectedError: "can not create index on a collection with a policy", + }, + + testUtils.Request{ + Request: ` + query { + Users { + name + age + } + }`, + + ExpectedError: `Cannot query field "Users" on type "Query"`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/p2p/replicator_test.go b/tests/integration/acp/p2p/replicator_test.go new file mode 100644 index 0000000000..9c3b0eca3f --- /dev/null +++ b/tests/integration/acp/p2p/replicator_test.go @@ -0,0 +1,89 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package test_acp_p2p
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+	acpUtils "github.com/sourcenetwork/defradb/tests/integration/acp"
+)
+
+// This test documents that we don't allow setting a replicator with a collection that has a policy
+// until the following is implemented:
+// TODO-ACP: ACP <> P2P https://github.com/sourcenetwork/defradb/issues/2366
+func TestACP_P2POneToOneReplicatorWithPermissionedCollection_Error(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, with p2p replicator with permissioned collection, error",
+
+		Actions: []any{
+
+			testUtils.RandomNetworkingConfig(),
+			testUtils.RandomNetworkingConfig(),
+
+			testUtils.AddPolicy{
+
+				Identity: acpUtils.Actor1Identity,
+
+				Policy: `
+                    description: a test policy which marks a collection in a database as a resource
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: owner + reader
+                          write:
+                            expr: owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+                          admin:
+                            manages:
+                              - reader
+                            types:
+                              - actor
+                `,
+
+				ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+			},
+
+			testUtils.SchemaUpdate{
+				Schema: `
+					type Users @policy(
+						id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+						resource: "users"
+					) {
+						name: String
+						age: Int
+					}
+				`,
+			},
+
+			testUtils.ConfigureReplicator{
+				SourceNodeID: 0,
+				TargetNodeID: 1,
+				ExpectedError: "replicator can not use all collections, as some have policy",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/p2p/subscribe_test.go b/tests/integration/acp/p2p/subscribe_test.go
new file mode 100644
index 0000000000..610339d24f
--- /dev/null
+++ b/tests/integration/acp/p2p/subscribe_test.go
@@ -0,0 +1,99 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+ +package test_acp_p2p + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + acpUtils "github.com/sourcenetwork/defradb/tests/integration/acp" +) + +// This test documents that we don't allow subscribing to a collection that has a policy +// until the following is implemented: +// TODO-ACP: ACP <> P2P https://github.com/sourcenetwork/defradb/issues/2366 +func TestACP_P2PSubscribeAddGetSingleWithPermissionedCollection_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, with p2p subscribe with permissioned collection, error", + + Actions: []any{ + + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + + testUtils.AddPolicy{ + + Identity: acpUtils.Actor1Identity, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.ConnectPeers{ + SourceNodeID: 1, + TargetNodeID: 0, + }, + + testUtils.SubscribeToCollection{ + NodeID: 1, + CollectionIDs: []int{0}, + ExpectedError: "p2p collection specified has a policy on it", + }, + + testUtils.GetAllP2PCollections{ + NodeID: 1, + ExpectedCollectionIDs: []int{}, // Note: Empty + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/query/avg_test.go b/tests/integration/acp/query/avg_test.go new file mode 100644 index 0000000000..cd540f83ad --- /dev/null +++ b/tests/integration/acp/query/avg_test.go @@ -0,0 +1,98 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
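+
+// The expected averages in this file follow from the shared fixture in fixture.go
+// (a worked check, assuming the fixture salaries): without an identity, or with the
+// wrong identity, only the two public employees are visible, so
+// _avg = (10000 + 20000) / 2 = 15000; with the owner identity all four employees are
+// visible, so _avg = (10000 + 20000 + 30000 + 40000) / 4 = 25000.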
+
+package test_acp
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+	acpUtils "github.com/sourcenetwork/defradb/tests/integration/acp"
+)
+
+func TestACP_QueryAverageWithoutIdentity(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Test acp, query average without identity",
+
+		Actions: []any{
+			getSetupEmployeeCompanyActions(),
+
+			testUtils.Request{
+				Request: `
+					query {
+						_avg(Employee: {field: salary})
+					}
+				`,
+				Results: []map[string]any{
+					{
+						// 2 public employees, 1 with salary 10k, 1 with salary 20k
+						"_avg": int(15000),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_QueryAverageWithIdentity(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Test acp, query average with identity",
+
+		Actions: []any{
+			getSetupEmployeeCompanyActions(),
+
+			testUtils.Request{
+				Identity: acpUtils.Actor1Identity,
+				Request: `
+					query {
+						_avg(Employee: {field: salary})
+					}
+				`,
+				Results: []map[string]any{
+					{
+						// 4 employees with salaries 10k, 20k, 30k, 40k
+						"_avg": int(25000),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_QueryAverageWithWrongIdentity(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Test acp, query average with wrong identity",
+
+		Actions: []any{
+			getSetupEmployeeCompanyActions(),
+
+			testUtils.Request{
+				Identity: acpUtils.Actor2Identity,
+				Request: `
+					query {
+						_avg(Employee: {field: salary})
+					}
+				`,
+				Results: []map[string]any{
+					{
+						// 2 public employees, 1 with salary 10k, 1 with salary 20k
+						"_avg": int(15000),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/query/count_test.go b/tests/integration/acp/query/count_test.go
new file mode 100644
index 0000000000..74c4025c22
--- /dev/null
+++ b/tests/integration/acp/query/count_test.go
@@ -0,0 +1,183 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
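+
+// The expected counts in this file follow from the shared fixture in fixture.go:
+// two of the four employees and one of the two companies are public, so a request
+// without an identity (or with the wrong identity) counts 2 employees, while the
+// owner identity counts all 4.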
+
+package test_acp
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+	acpUtils "github.com/sourcenetwork/defradb/tests/integration/acp"
+)
+
+func TestACP_QueryCountDocumentsWithoutIdentity(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Test acp, query documents' count without identity",
+
+		Actions: []any{
+			getSetupEmployeeCompanyActions(),
+
+			testUtils.Request{
+				Request: `
+					query {
+						_count(Employee: {})
+					}
+				`,
+				Results: []map[string]any{
+					{
+						"_count": int(2),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_QueryCountRelatedObjectsWithoutIdentity(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Test acp, query count of related objects without identity",
+
+		Actions: []any{
+			getSetupEmployeeCompanyActions(),
+
+			testUtils.Request{
+				Request: `
+					query {
+						Company {
+							_count(employees: {})
+						}
+					}
+				`,
+				Results: []map[string]any{
+					{
+						// 1 of 2 companies is public and has 1 public employee out of 2
+						"_count": int(1),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_QueryCountDocumentsWithIdentity(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Test acp, query documents' count with identity",
+
+		Actions: []any{
+			getSetupEmployeeCompanyActions(),
+
+			testUtils.Request{
+				Identity: acpUtils.Actor1Identity,
+				Request: `
+					query {
+						_count(Employee: {})
+					}
+				`,
+				Results: []map[string]any{
+					{
+						"_count": int(4),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_QueryCountRelatedObjectsWithIdentity(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Test acp, query count of related objects with identity",
+
+		Actions: []any{
+			getSetupEmployeeCompanyActions(),
+
+			testUtils.Request{
+				Identity: acpUtils.Actor1Identity,
+				Request: `
+					query {
+						Company {
+							_count(employees: {})
+						}
+					}
+				`,
+				Results: []map[string]any{
+					{
+						"_count": int(2),
+					},
+					{
+						"_count": int(2),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_QueryCountDocumentsWithWrongIdentity(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Test acp, query documents' count with wrong identity",
+
+		Actions: []any{
+			getSetupEmployeeCompanyActions(),
+
+			testUtils.Request{
+				Identity: acpUtils.Actor2Identity,
+				Request: `
+					query {
+						_count(Employee: {})
+					}
+				`,
+				Results: []map[string]any{
+					{
+						"_count": int(2),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_QueryCountRelatedObjectsWithWrongIdentity(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Test acp, query count of related objects with wrong identity",
+
+		Actions: []any{
+			getSetupEmployeeCompanyActions(),
+
+			testUtils.Request{
+				Identity: acpUtils.Actor2Identity,
+				Request: `
+					query {
+						Company {
+							_count(employees: {})
+						}
+					}
+				`,
+				Results: []map[string]any{
+					{
+						// 1 of 2 companies is public and has 1 public employee out of 2
+						"_count": int(1),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
}
diff --git a/tests/integration/acp/query/fixture.go b/tests/integration/acp/query/fixture.go
new file mode 100644
index 0000000000..ed81ed0633
--- /dev/null
+++ b/tests/integration/acp/query/fixture.go
@@ -0,0 +1,148 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp + +import ( + testUtils "github.com/sourcenetwork/defradb/tests/integration" + acpUtils "github.com/sourcenetwork/defradb/tests/integration/acp" +) + +const employeeCompanyPolicy = ` +description: A Valid DefraDB Policy Interface (DPI) + +actor: + name: actor + +resources: + employees: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + + companies: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor +` + +func getSetupEmployeeCompanyActions() []any { + return []any{ + testUtils.AddPolicy{ + Identity: acpUtils.Actor1Identity, + Policy: employeeCompanyPolicy, + ExpectedPolicyID: "67607eb2a2a873f4a69eb6876323cee7601d8a4d4fedcc18154aaee65cf38e7f", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Employee @policy( + id: "67607eb2a2a873f4a69eb6876323cee7601d8a4d4fedcc18154aaee65cf38e7f", + resource: "employees" + ) { + name: String + salary: Int + company: Company + } + + type Company @policy( + id: "67607eb2a2a873f4a69eb6876323cee7601d8a4d4fedcc18154aaee65cf38e7f", + resource: "companies" + ) { + name: String + capital: Int + employees: [Employee] + } + `, + }, + + testUtils.CreateDoc{ + CollectionID: 1, + Doc: ` + { + "name": "Public Company", + "capital": 100000 + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Identity: acpUtils.Actor1Identity, + Doc: ` + { + "name": "Private Company", + "capital": 200000 + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "PubEmp in PubCompany", + "salary": 10000, + "company": "bae-1ab7ac86-3c68-5abb-b526-803858c9dccf" + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "PubEmp in PrivateCompany", + "salary": 20000, + "company": "bae-4aef4bd6-e2ee-5075-85a5-4d64bbf80bca" + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Identity: acpUtils.Actor1Identity, + Doc: ` + { + "name": "PrivateEmp in PubCompany", + "salary": 30000, + "company": "bae-1ab7ac86-3c68-5abb-b526-803858c9dccf" + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Identity: acpUtils.Actor1Identity, + Doc: ` + { + "name": "PrivateEmp in PrivateCompany", + "salary": 40000, + "company": "bae-4aef4bd6-e2ee-5075-85a5-4d64bbf80bca" + } + `, + }, + } +} diff --git a/tests/integration/acp/query/relation_objects_test.go b/tests/integration/acp/query/relation_objects_test.go new file mode 100644 index 0000000000..76bd264ac8 --- /dev/null +++ b/tests/integration/acp/query/relation_objects_test.go @@ -0,0 +1,242 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package test_acp + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + acpUtils "github.com/sourcenetwork/defradb/tests/integration/acp" +) + +func TestACP_QueryManyToOneRelationObjectsWithoutIdentity(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test acp, query employees with their companies without identity", + + Actions: []any{ + getSetupEmployeeCompanyActions(), + + testUtils.Request{ + Request: ` + query { + Employee { + name + company { + name + } + } + } + `, + Results: []map[string]any{ + { + "name": "PubEmp in PrivateCompany", + "company": nil, + }, + { + "name": "PubEmp in PubCompany", + "company": map[string]any{"name": "Public Company"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_QueryOneToManyRelationObjectsWithoutIdentity(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test acp, query companies with their employees without identity", + + Actions: []any{ + getSetupEmployeeCompanyActions(), + + testUtils.Request{ + Request: ` + query { + Company { + name + employees { + name + } + } + } + `, + Results: []map[string]any{ + { + "name": "Public Company", + "employees": []map[string]any{ + {"name": "PubEmp in PubCompany"}, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_QueryManyToOneRelationObjectsWithIdentity(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test acp, query employees with their companies with identity", + + Actions: []any{ + getSetupEmployeeCompanyActions(), + + testUtils.Request{ + Identity: acpUtils.Actor1Identity, + Request: ` + query { + Employee { + name + company { + name + } + } + } + `, + Results: []map[string]any{ + { + "name": "PrivateEmp in PubCompany", + "company": map[string]any{"name": "Public Company"}, + }, + { + "name": "PrivateEmp in PrivateCompany", + "company": map[string]any{"name": "Private Company"}, + }, + { + "name": "PubEmp in PrivateCompany", + "company": map[string]any{"name": "Private Company"}, + }, + { + "name": "PubEmp in PubCompany", + "company": map[string]any{"name": "Public Company"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_QueryOneToManyRelationObjectsWithIdentity(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test acp, query companies with their employees with identity", + + Actions: []any{ + getSetupEmployeeCompanyActions(), + + testUtils.Request{ + Identity: acpUtils.Actor1Identity, + Request: ` + query { + Company { + name + employees { + name + } + } + } + `, + Results: []map[string]any{ + { + "name": "Public Company", + "employees": []map[string]any{ + {"name": "PrivateEmp in PubCompany"}, + {"name": "PubEmp in PubCompany"}, + }, + }, + { + "name": "Private Company", + "employees": []map[string]any{ + {"name": "PrivateEmp in PrivateCompany"}, + {"name": "PubEmp in PrivateCompany"}, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_QueryManyToOneRelationObjectsWithWrongIdentity(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test acp, query employees with their companies with wrong identity", + + Actions: []any{ + getSetupEmployeeCompanyActions(), + + testUtils.Request{ + Identity: acpUtils.Actor2Identity, + Request: ` + query { + Employee { + name + company { + name + } + } + } + `, + Results: []map[string]any{ + { + "name": "PubEmp in PrivateCompany", + "company": nil, + }, + { + "name": "PubEmp in PubCompany", + "company": map[string]any{"name": "Public 
Company"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_QueryOneToManyRelationObjectsWithWrongIdentity(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test acp, query companies with their employees with wrong identity", + + Actions: []any{ + getSetupEmployeeCompanyActions(), + + testUtils.Request{ + Identity: acpUtils.Actor2Identity, + Request: ` + query { + Company { + name + employees { + name + } + } + } + `, + Results: []map[string]any{ + { + "name": "Public Company", + "employees": []map[string]any{ + {"name": "PubEmp in PubCompany"}, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/register_and_delete_test.go b/tests/integration/acp/register_and_delete_test.go new file mode 100644 index 0000000000..5d0baf0762 --- /dev/null +++ b/tests/integration/acp/register_and_delete_test.go @@ -0,0 +1,514 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_CreateWithoutIdentityAndDeleteWithoutIdentity_CanDelete(t *testing.T) { + // The same identity that is used to do the registering/creation should be used in the + // final read check to see the state of that registered document. + // Note: In this test that identity is empty (no identity). + + test := testUtils.TestCase{ + + Description: "Test acp, create without identity, and delete without identity, can delete", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: Actor1Identity, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.CreateDoc{ + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.DeleteDoc{ + CollectionID: 0, + + DocID: 0, + }, + + testUtils.Request{ + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_CreateWithoutIdentityAndDeleteWithIdentity_CanDelete(t *testing.T) { + // The same identity that is used to do the registering/creation should be used in the + // final read check to see the state of that registered document. + // Note: In this test that identity is empty (no identity). 
+ + test := testUtils.TestCase{ + + Description: "Test acp, create without identity, and delete with identity, can delete", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: Actor1Identity, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.CreateDoc{ + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.DeleteDoc{ + CollectionID: 0, + + Identity: Actor1Identity, + + DocID: 0, + }, + + testUtils.Request{ + Request: ` + query { + Users { + _docID + name + age + } + } + `, + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_CreateWithIdentityAndDeleteWithIdentity_CanDelete(t *testing.T) { + // OwnerIdentity should be the same identity that is used to do the registering/creation, + // and the final read check to see the state of that registered document. + OwnerIdentity := Actor1Identity + + test := testUtils.TestCase{ + + Description: "Test acp, create with identity, and delete with identity, can delete", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: OwnerIdentity, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.CreateDoc{ + CollectionID: 0, + + Identity: OwnerIdentity, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.DeleteDoc{ + CollectionID: 0, + + Identity: OwnerIdentity, + + DocID: 0, + }, + + testUtils.Request{ + Identity: OwnerIdentity, + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_CreateWithIdentityAndDeleteWithoutIdentity_CanNotDelete(t *testing.T) { + // OwnerIdentity should be the same identity that is used to do the registering/creation, + // and the final read check to see the state of that registered document. 
+	OwnerIdentity := Actor1Identity
+
+	test := testUtils.TestCase{
+
+		Description: "Test acp, create with identity, and delete without identity, can not delete",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+
+				Identity: OwnerIdentity,
+
+				Policy: `
+                    description: a test policy which marks a collection in a database as a resource
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: owner + reader
+                          write:
+                            expr: owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+                          admin:
+                            manages:
+                              - reader
+                            types:
+                              - actor
+                `,
+
+				ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+			},
+
+			testUtils.SchemaUpdate{
+				Schema: `
+					type Users @policy(
+						id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+						resource: "users"
+					) {
+						name: String
+						age: Int
+					}
+				`,
+			},
+
+			testUtils.CreateDoc{
+				CollectionID: 0,
+
+				Identity: OwnerIdentity,
+
+				Doc: `
+					{
+						"name": "Shahzad",
+						"age": 28
+					}
+				`,
+			},
+
+			testUtils.DeleteDoc{
+				CollectionID: 0,
+
+				DocID: 0,
+
+				ExpectedError: "document not found or not authorized to access",
+			},
+
+			testUtils.Request{
+				Identity: OwnerIdentity,
+
+				Request: `
+					query {
+						Users {
+							_docID
+							name
+							age
+						}
+					}
+				`,
+				Results: []map[string]any{
+					{
+						"_docID": "bae-1e608f7d-b01e-5dd5-ad4a-9c6cc3005a36",
+						"name": "Shahzad",
+						"age": int64(28),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_CreateWithIdentityAndDeleteWithWrongIdentity_CanNotDelete(t *testing.T) {
+	// OwnerIdentity should be the same identity that is used to do the registering/creation,
+	// and the final read check to see the state of that registered document.
+	OwnerIdentity := Actor1Identity
+
+	WrongIdentity := Actor2Identity
+
+	test := testUtils.TestCase{
+
+		Description: "Test acp, create with identity, and delete with wrong identity, can not delete",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+
+				Identity: OwnerIdentity,
+
+				Policy: `
+                    description: a test policy which marks a collection in a database as a resource
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: owner + reader
+                          write:
+                            expr: owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+                          admin:
+                            manages:
+                              - reader
+                            types:
+                              - actor
+                `,
+
+				ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+			},
+
+			testUtils.SchemaUpdate{
+				Schema: `
+					type Users @policy(
+						id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+						resource: "users"
+					) {
+						name: String
+						age: Int
+					}
+				`,
+			},
+
+			testUtils.CreateDoc{
+				CollectionID: 0,
+
+				Identity: OwnerIdentity,
+
+				Doc: `
+					{
+						"name": "Shahzad",
+						"age": 28
+					}
+				`,
+			},
+
+			testUtils.DeleteDoc{
+				CollectionID: 0,
+
+				Identity: WrongIdentity,
+
+				DocID: 0,
+
+				ExpectedError: "document not found or not authorized to access",
+			},
+
+			testUtils.Request{
+				Identity: OwnerIdentity,
+
+				Request: `
+					query {
+						Users {
+							_docID
+							name
+							age
+						}
+					}
+				`,
+				Results: []map[string]any{
+					{
+						"_docID": "bae-1e608f7d-b01e-5dd5-ad4a-9c6cc3005a36",
+						"name": "Shahzad",
+						"age": int64(28),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/register_and_read_test.go b/tests/integration/acp/register_and_read_test.go
new file mode 100644
index 0000000000..a2620b82d7
--- /dev/null
+++ b/tests/integration/acp/register_and_read_test.go
@@ -0,0 +1,457 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this
software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_CreateWithoutIdentityAndReadWithoutIdentity_CanRead(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, create without identity, and read without identity, can read", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: Actor1Identity, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.CreateDoc{ + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Request: ` + query { + Users { + _docID + name + age + } + } + `, + Results: []map[string]any{ + { + "_docID": "bae-1e608f7d-b01e-5dd5-ad4a-9c6cc3005a36", + "name": "Shahzad", + "age": int64(28), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_CreateWithoutIdentityAndReadWithIdentity_CanRead(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, create without identity, and read with identity, can read", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: Actor1Identity, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.CreateDoc{ + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: Actor1Identity, + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + Results: []map[string]any{ + { + "_docID": "bae-1e608f7d-b01e-5dd5-ad4a-9c6cc3005a36", + "name": "Shahzad", + "age": int64(28), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_CreateWithIdentityAndReadWithIdentity_CanRead(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, create with identity, and read with identity, can read", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: Actor1Identity, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: 
owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+                          admin:
+                            manages:
+                              - reader
+                            types:
+                              - actor
+                `,
+
+				ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+			},
+
+			testUtils.SchemaUpdate{
+				Schema: `
+					type Users @policy(
+						id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+						resource: "users"
+					) {
+						name: String
+						age: Int
+					}
+				`,
+			},
+
+			testUtils.CreateDoc{
+				CollectionID: 0,
+
+				Identity: Actor1Identity,
+
+				Doc: `
+					{
+						"name": "Shahzad",
+						"age": 28
+					}
+				`,
+			},
+
+			testUtils.Request{
+				Identity: Actor1Identity,
+
+				Request: `
+					query {
+						Users {
+							_docID
+							name
+							age
+						}
+					}
+				`,
+				Results: []map[string]any{
+					{
+						"_docID": "bae-1e608f7d-b01e-5dd5-ad4a-9c6cc3005a36",
+						"name": "Shahzad",
+						"age": int64(28),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_CreateWithIdentityAndReadWithoutIdentity_CanNotRead(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, create with identity, and read without identity, can not read",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+
+				Identity: Actor1Identity,
+
+				Policy: `
+                    description: a test policy which marks a collection in a database as a resource
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: owner + reader
+                          write:
+                            expr: owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+                          admin:
+                            manages:
+                              - reader
+                            types:
+                              - actor
+                `,
+
+				ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+			},
+
+			testUtils.SchemaUpdate{
+				Schema: `
+					type Users @policy(
+						id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+						resource: "users"
+					) {
+						name: String
+						age: Int
+					}
+				`,
+			},
+
+			testUtils.CreateDoc{
+				CollectionID: 0,
+
+				Identity: Actor1Identity,
+
+				Doc: `
+					{
+						"name": "Shahzad",
+						"age": 28
+					}
+				`,
+			},
+
+			testUtils.Request{
+				Request: `
+					query {
+						Users {
+							_docID
+							name
+							age
+						}
+					}
+				`,
+				Results: []map[string]any{},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_CreateWithIdentityAndReadWithWrongIdentity_CanNotRead(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, create with identity, and read with wrong identity, can not read",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+
+				Identity: Actor1Identity,
+
+				Policy: `
+                    description: a test policy which marks a collection in a database as a resource
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: owner + reader
+                          write:
+                            expr: owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+                          admin:
+                            manages:
+                              - reader
+                            types:
+                              - actor
+                `,
+
+				ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+			},
+
+			testUtils.SchemaUpdate{
+				Schema: `
+					type Users @policy(
+						id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+						resource: "users"
+					) {
+						name: String
+						age: Int
+					}
+				`,
+			},
+
+			testUtils.CreateDoc{
+				CollectionID: 0,
+
+				Identity: Actor1Identity,
+
+				Doc: `
+					{
+						"name": "Shahzad",
+						"age": 28
+					}
+				`,
+			},
+
+			testUtils.Request{
+				Identity: Actor2Identity,
+
+				Request: `
+					query {
+						Users {
+							_docID
+							name
+							age
+						}
+					}
+				`,
+				Results: []map[string]any{},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/register_and_update_test.go b/tests/integration/acp/register_and_update_test.go
new file mode 100644
index 0000000000..96810409db
---
/dev/null +++ b/tests/integration/acp/register_and_update_test.go @@ -0,0 +1,810 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_CreateWithoutIdentityAndUpdateWithoutIdentity_CanUpdate(t *testing.T) { + // The same identity that is used to do the registering/creation should be used in the + // final read check to see the state of that registered document. + // Note: In this test that identity is empty (no identity). + + test := testUtils.TestCase{ + + Description: "Test acp, create without identity, and update without identity, can update", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: Actor1Identity, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.CreateDoc{ + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + }, + + testUtils.Request{ + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: []map[string]any{ + { + "_docID": "bae-1e608f7d-b01e-5dd5-ad4a-9c6cc3005a36", + "name": "Shahzad Lone", + "age": int64(28), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_CreateWithoutIdentityAndUpdateWithIdentity_CanUpdate(t *testing.T) { + // The same identity that is used to do the registering/creation should be used in the + // final read check to see the state of that registered document. + // Note: In this test that identity is empty (no identity). 
+ + test := testUtils.TestCase{ + + Description: "Test acp, create without identity, and update with identity, can update", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: Actor1Identity, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.CreateDoc{ + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: Actor1Identity, + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + }, + + testUtils.Request{ + Request: ` + query { + Users { + _docID + name + age + } + } + `, + Results: []map[string]any{ + { + "_docID": "bae-1e608f7d-b01e-5dd5-ad4a-9c6cc3005a36", + "name": "Shahzad Lone", + "age": int64(28), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_CreateWithIdentityAndUpdateWithIdentity_CanUpdate(t *testing.T) { + // OwnerIdentity should be the same identity that is used to do the registering/creation, + // and the final read check to see the state of that registered document. + OwnerIdentity := Actor1Identity + + test := testUtils.TestCase{ + + Description: "Test acp, create with identity, and update with identity, can update", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: OwnerIdentity, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.CreateDoc{ + CollectionID: 0, + + Identity: OwnerIdentity, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: OwnerIdentity, + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + }, + + testUtils.Request{ + Identity: OwnerIdentity, + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + Results: []map[string]any{ + { + "_docID": "bae-1e608f7d-b01e-5dd5-ad4a-9c6cc3005a36", + "name": "Shahzad Lone", + "age": int64(28), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_CreateWithIdentityAndUpdateWithoutIdentity_CanNotUpdate(t *testing.T) { + // OwnerIdentity should be the same identity that is used to do the registering/creation, + // and the final read check to see the state of that registered document. 
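+	// The update below is attempted without any identity and is expected to be
+	// rejected with a "not found or not authorized" error.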
+	OwnerIdentity := Actor1Identity
+
+	test := testUtils.TestCase{
+
+		Description: "Test acp, create with identity, and update without identity, can not update",
+
+		SupportedMutationTypes: immutable.Some([]testUtils.MutationType{
+			// GQL mutation will return no error when wrong identity is used so test that separately.
+			testUtils.CollectionNamedMutationType,
+			testUtils.CollectionSaveMutationType,
+		}),
+
+		Actions: []any{
+			testUtils.AddPolicy{
+
+				Identity: OwnerIdentity,
+
+				Policy: `
+					description: a test policy which marks a collection in a database as a resource
+
+					actor:
+					  name: actor
+
+					resources:
+					  users:
+					    permissions:
+					      read:
+					        expr: owner + reader
+					      write:
+					        expr: owner
+
+					    relations:
+					      owner:
+					        types:
+					          - actor
+					      reader:
+					        types:
+					          - actor
+					      admin:
+					        manages:
+					          - reader
+					        types:
+					          - actor
+				`,
+
+				ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+			},
+
+			testUtils.SchemaUpdate{
+				Schema: `
+					type Users @policy(
+						id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+						resource: "users"
+					) {
+						name: String
+						age: Int
+					}
+				`,
+			},
+
+			testUtils.CreateDoc{
+				CollectionID: 0,
+
+				Identity: OwnerIdentity,
+
+				Doc: `
+					{
+						"name": "Shahzad",
+						"age": 28
+					}
+				`,
+			},
+
+			testUtils.UpdateDoc{
+				CollectionID: 0,
+
+				DocID: 0,
+
+				Doc: `
+					{
+						"name": "Shahzad Lone"
+					}
+				`,
+
+				ExpectedError: "document not found or not authorized to access",
+			},
+
+			testUtils.Request{
+				Identity: OwnerIdentity,
+
+				Request: `
+					query {
+						Users {
+							_docID
+							name
+							age
+						}
+					}
+				`,
+				Results: []map[string]any{
+					{
+						"_docID": "bae-1e608f7d-b01e-5dd5-ad4a-9c6cc3005a36",
+						"name": "Shahzad",
+						"age": int64(28),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_CreateWithIdentityAndUpdateWithWrongIdentity_CanNotUpdate(t *testing.T) {
+	// OwnerIdentity should be the same identity that is used to do the registering/creation,
+	// and the final read check to see the state of that registered document.
+	OwnerIdentity := Actor1Identity
+
+	WrongIdentity := Actor2Identity
+
+	test := testUtils.TestCase{
+
+		Description: "Test acp, create with identity, and update with wrong identity, can not update",
+
+		SupportedMutationTypes: immutable.Some([]testUtils.MutationType{
+			// GQL mutation will return no error when wrong identity is used so test that separately.
+ testUtils.CollectionNamedMutationType, + testUtils.CollectionSaveMutationType, + }), + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: OwnerIdentity, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.CreateDoc{ + CollectionID: 0, + + Identity: OwnerIdentity, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: WrongIdentity, + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + + ExpectedError: "document not found or not authorized to access", + }, + + testUtils.Request{ + Identity: OwnerIdentity, + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + Results: []map[string]any{ + { + "_docID": "bae-1e608f7d-b01e-5dd5-ad4a-9c6cc3005a36", + "name": "Shahzad", + "age": int64(28), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +// This separate GQL test should be merged with the ones above when all the clients are fixed +// to behave the same in: https://github.com/sourcenetwork/defradb/issues/2410 +func TestACP_CreateWithIdentityAndUpdateWithoutIdentityGQL_CanNotUpdate(t *testing.T) { + // OwnerIdentity should be the same identity that is used to do the registering/creation, + // and the final read check to see the state of that registered document. + OwnerIdentity := Actor1Identity + + test := testUtils.TestCase{ + + Description: "Test acp, create with identity, and update without identity (gql), can not update", + + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // GQL mutation will return no error when wrong identity is used so test that separately. 
+			testUtils.GQLRequestMutationType,
+		}),
+
+		Actions: []any{
+			testUtils.AddPolicy{
+
+				Identity: OwnerIdentity,
+
+				Policy: `
+					description: a test policy which marks a collection in a database as a resource
+
+					actor:
+					  name: actor
+
+					resources:
+					  users:
+					    permissions:
+					      read:
+					        expr: owner + reader
+					      write:
+					        expr: owner
+
+					    relations:
+					      owner:
+					        types:
+					          - actor
+					      reader:
+					        types:
+					          - actor
+					      admin:
+					        manages:
+					          - reader
+					        types:
+					          - actor
+				`,
+
+				ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+			},
+
+			testUtils.SchemaUpdate{
+				Schema: `
+					type Users @policy(
+						id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+						resource: "users"
+					) {
+						name: String
+						age: Int
+					}
+				`,
+			},
+
+			testUtils.CreateDoc{
+				CollectionID: 0,
+
+				Identity: OwnerIdentity,
+
+				Doc: `
+					{
+						"name": "Shahzad",
+						"age": 28
+					}
+				`,
+			},
+
+			testUtils.UpdateDoc{
+				CollectionID: 0,
+
+				DocID: 0,
+
+				Doc: `
+					{
+						"name": "Shahzad Lone"
+					}
+				`,
+			},
+
+			testUtils.Request{
+				Identity: OwnerIdentity,
+
+				Request: `
+					query {
+						Users {
+							_docID
+							name
+							age
+						}
+					}
+				`,
+				Results: []map[string]any{
+					{
+						"_docID": "bae-1e608f7d-b01e-5dd5-ad4a-9c6cc3005a36",
+						"name": "Shahzad",
+						"age": int64(28),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+// This separate GQL test should be merged with the ones above when all the clients are fixed
+// to behave the same in: https://github.com/sourcenetwork/defradb/issues/2410
+func TestACP_CreateWithIdentityAndUpdateWithWrongIdentityGQL_CanNotUpdate(t *testing.T) {
+	// OwnerIdentity should be the same identity that is used to do the registering/creation,
+	// and the final read check to see the state of that registered document.
+	OwnerIdentity := Actor1Identity
+
+	WrongIdentity := Actor2Identity
+
+	test := testUtils.TestCase{
+
+		Description: "Test acp, create with identity, and update with wrong identity (gql), can not update",
+
+		SupportedMutationTypes: immutable.Some([]testUtils.MutationType{
+			// GQL mutation will return no error when wrong identity is used so test that separately.
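+			// Same silent-failure expectation as above, but with a wrong (non-owner)
+			// identity rather than no identity.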
+			testUtils.GQLRequestMutationType,
+		}),
+
+		Actions: []any{
+			testUtils.AddPolicy{
+
+				Identity: OwnerIdentity,
+
+				Policy: `
+					description: a test policy which marks a collection in a database as a resource
+
+					actor:
+					  name: actor
+
+					resources:
+					  users:
+					    permissions:
+					      read:
+					        expr: owner + reader
+					      write:
+					        expr: owner
+
+					    relations:
+					      owner:
+					        types:
+					          - actor
+					      reader:
+					        types:
+					          - actor
+					      admin:
+					        manages:
+					          - reader
+					        types:
+					          - actor
+				`,
+
+				ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+			},
+
+			testUtils.SchemaUpdate{
+				Schema: `
+					type Users @policy(
+						id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+						resource: "users"
+					) {
+						name: String
+						age: Int
+					}
+				`,
+			},
+
+			testUtils.CreateDoc{
+				CollectionID: 0,
+
+				Identity: OwnerIdentity,
+
+				Doc: `
+					{
+						"name": "Shahzad",
+						"age": 28
+					}
+				`,
+			},
+
+			testUtils.UpdateDoc{
+				CollectionID: 0,
+
+				Identity: WrongIdentity,
+
+				DocID: 0,
+
+				Doc: `
+					{
+						"name": "Shahzad Lone"
+					}
+				`,
+			},
+
+			testUtils.Request{
+				Identity: OwnerIdentity,
+
+				Request: `
+					query {
+						Users {
+							_docID
+							name
+							age
+						}
+					}
+				`,
+				Results: []map[string]any{
+					{
+						"_docID": "bae-1e608f7d-b01e-5dd5-ad4a-9c6cc3005a36",
+						"name": "Shahzad",
+						"age": int64(28),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/schema/add_dpi/README.md b/tests/integration/acp/schema/add_dpi/README.md
new file mode 100644
index 0000000000..4bb0b065c9
--- /dev/null
+++ b/tests/integration/acp/schema/add_dpi/README.md
@@ -0,0 +1,7 @@
+## Accept vs Reject:
+- All tests are broken into `accept_*_test.go` and `reject_*_test.go` files.
+- Accepted tests use valid DPIs (hence the schema is accepted).
+- Rejected tests use invalid DPIs (hence the schema is rejected).
+- There are also some partially valid DPI tests that are accepted or rejected depending on which resource is used.
+
+Learn more about the DefraDB Policy Interface (DPI) in the [ACP README](/acp/README.md).
diff --git a/tests/integration/acp/schema/add_dpi/accept_basic_dpi_fmts_test.go b/tests/integration/acp/schema/add_dpi/accept_basic_dpi_fmts_test.go
new file mode 100644
index 0000000000..cfc668a25c
--- /dev/null
+++ b/tests/integration/acp/schema/add_dpi/accept_basic_dpi_fmts_test.go
@@ -0,0 +1,214 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
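+
+// This file verifies that the same minimal DPI is accepted whether the policy
+// is submitted in YAML or in JSON form.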
+ +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + schemaUtils "github.com/sourcenetwork/defradb/tests/integration/schema" +) + +func TestACP_AddDPISchema_BasicYAML_SchemaAccepted(t *testing.T) { + policyIDOfValidDPI := "dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a" + + test := testUtils.TestCase{ + + Description: "Test acp, specify basic policy that was added in YAML format, accept schema", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: a basic policy that satisfies minimum DPI requirements + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + write: + expr: owner + + relations: + owner: + types: + - actor + + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfValidDPI, + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "Users", // NOTE: "Users" MUST exist + "fields": schemaUtils.DefaultFields.Append( + schemaUtils.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Append( + schemaUtils.Field{ + "name": "age", + "type": map[string]any{ + "kind": "SCALAR", + "name": "Int", + }, + }, + ).Tidy(), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_BasicJSON_SchemaAccepted(t *testing.T) { + policyIDOfValidDPI := "dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a" + + test := testUtils.TestCase{ + + Description: "Test acp, specify basic policy that was added in JSON format, accept schema", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + { + "description": "a basic policy that satisfies minimum DPI requirements", + "resources": { + "users": { + "permissions": { + "read": { + "expr": "owner" + }, + "write": { + "expr": "owner" + } + }, + "relations": { + "owner": { + "types": [ + "actor" + ] + } + } + } + }, + "actor": { + "name": "actor" + } + } + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfValidDPI, + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "Users", // NOTE: "Users" MUST exist + "fields": schemaUtils.DefaultFields.Append( + schemaUtils.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Append( + schemaUtils.Field{ + "name": "age", + "type": map[string]any{ + "kind": "SCALAR", + "name": "Int", + }, + }, + ).Tidy(), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/accept_extra_permissions_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/accept_extra_permissions_on_dpi_test.go new file mode 100644 index 0000000000..348736a58c --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/accept_extra_permissions_on_dpi_test.go @@ -0,0 +1,316 @@ +// 
Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + schemaUtils "github.com/sourcenetwork/defradb/tests/integration/schema" +) + +func TestACP_AddDPISchema_WithExtraPermsHavingRequiredRelation_AcceptSchema(t *testing.T) { + policyIDOfValidDPI := "16e39e650d4cbd5161ae0c572edad6f7e2950c1c4afa37e427af3c8708e68f0f" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, with extra permissions having required relation, schema accepted", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + reader + magic: + expr: owner - reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfValidDPI, + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "Users", // NOTE: "Users" MUST exist + "fields": schemaUtils.DefaultFields.Append( + schemaUtils.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Append( + schemaUtils.Field{ + "name": "age", + "type": map[string]any{ + "kind": "SCALAR", + "name": "Int", + }, + }, + ).Tidy(), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_WithExtraPermsHavingRequiredRelationInTheEnd_AcceptSchema(t *testing.T) { + policyIDOfValidDPI := "35b6f3db54cfb0f451a4faba77d2c71d8718215caeb5a15a8570dfdba07b694d" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, with extra permissions having required relation in the end, schema accepted", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + magic: + expr: reader & owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfValidDPI, + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "Users", // NOTE: "Users" MUST exist + "fields": schemaUtils.DefaultFields.Append( + schemaUtils.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Append( + schemaUtils.Field{ + 
"name": "age", + "type": map[string]any{ + "kind": "SCALAR", + "name": "Int", + }, + }, + ).Tidy(), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_WithExtraPermsHavingNoRequiredRelation_AcceptSchema(t *testing.T) { + policyIDOfValidDPI := "7b6266a93bfb6920bf57884f55c3823a5a5147c4ce445a9fc703b7c1e59b2d12" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, with extra permissions having no required relation, schema accepted", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + magic: + expr: reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfValidDPI, + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "Users", // NOTE: "Users" MUST exist + "fields": schemaUtils.DefaultFields.Append( + schemaUtils.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Append( + schemaUtils.Field{ + "name": "age", + "type": map[string]any{ + "kind": "SCALAR", + "name": "Int", + }, + }, + ).Tidy(), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/accept_managed_relation_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/accept_managed_relation_on_dpi_test.go new file mode 100644 index 0000000000..72f622201f --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/accept_managed_relation_on_dpi_test.go @@ -0,0 +1,121 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+
+package test_acp_schema_add_dpi
+
+import (
+	"fmt"
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+	schemaUtils "github.com/sourcenetwork/defradb/tests/integration/schema"
+)
+
+func TestACP_AddDPISchema_WithManagedRelation_AcceptSchemas(t *testing.T) {
+	policyIDOfValidDPI := "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001"
+
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add dpi schema, with managed relation, schema accepted",
+
+		Actions: []any{
+
+			testUtils.AddPolicy{
+
+				Identity: actor1Identity,
+
+				Policy: `
+					description: A Valid Defra Policy Interface (DPI)
+
+					actor:
+					  name: actor
+
+					resources:
+					  users:
+					    permissions:
+					      read:
+					        expr: owner + reader
+					      write:
+					        expr: owner
+
+					    relations:
+					      owner:
+					        types:
+					          - actor
+					      reader:
+					        types:
+					          - actor
+					      admin:
+					        manages:
+					          - reader
+					        types:
+					          - actor
+				`,
+
+				ExpectedPolicyID: policyIDOfValidDPI,
+			},
+
+			testUtils.SchemaUpdate{
+				Schema: fmt.Sprintf(`
+					type Users @policy(
+						id: "%s",
+						resource: "users"
+					) {
+						name: String
+						age: Int
+					}
+				`,
+					policyIDOfValidDPI,
+				),
+			},
+
+			testUtils.IntrospectionRequest{
+				Request: `
+					query {
+						__type (name: "Users") {
+							name
+							fields {
+								name
+								type {
+									name
+									kind
+								}
+							}
+						}
+					}
+				`,
+				ExpectedData: map[string]any{
+					"__type": map[string]any{
+						"name": "Users", // NOTE: "Users" MUST exist
+						"fields": schemaUtils.DefaultFields.Append(
+							schemaUtils.Field{
+								"name": "name",
+								"type": map[string]any{
+									"kind": "SCALAR",
+									"name": "String",
+								},
+							},
+						).Append(
+							schemaUtils.Field{
+								"name": "age",
+								"type": map[string]any{
+									"kind": "SCALAR",
+									"name": "Int",
+								},
+							},
+						).Tidy(),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/schema/add_dpi/accept_mixed_resources_on_partial_dpi_test.go b/tests/integration/acp/schema/add_dpi/accept_mixed_resources_on_partial_dpi_test.go
new file mode 100644
index 0000000000..a991d4b280
--- /dev/null
+++ b/tests/integration/acp/schema/add_dpi/accept_mixed_resources_on_partial_dpi_test.go
@@ -0,0 +1,131 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+ +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + schemaUtils "github.com/sourcenetwork/defradb/tests/integration/schema" +) + +func TestACP_AddDPISchema_PartialValidDPIButUseOnlyValidDPIResource_AcceptSchema(t *testing.T) { + policyIDOfPartiallyValidDPI := "d5d411825b2d8fa5a550f1e34153b88b375ed9c9af19ce6d2ba1769e237a45d0" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, has both valid & invalid resources, but use only valid resource, schema accepted", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: A Partially Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + usersValid: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + + usersInvalid: + permissions: + read: + expr: reader - owner + write: + expr: reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfPartiallyValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "usersValid" + ) { + name: String + age: Int + } + `, + policyIDOfPartiallyValidDPI, + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "Users", // NOTE: "Users" MUST exist + "fields": schemaUtils.DefaultFields.Append( + schemaUtils.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Append( + schemaUtils.Field{ + "name": "age", + "type": map[string]any{ + "kind": "SCALAR", + "name": "Int", + }, + }, + ).Tidy(), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/accept_multi_dpis_test.go b/tests/integration/acp/schema/add_dpi/accept_multi_dpis_test.go new file mode 100644 index 0000000000..0ec43706ee --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/accept_multi_dpis_test.go @@ -0,0 +1,183 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
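+
+// This file verifies that two different actors can register byte-identical
+// policies (yielding distinct policy IDs) and that each ID can back its own
+// collection.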
+ +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + schemaUtils "github.com/sourcenetwork/defradb/tests/integration/schema" +) + +func TestACP_AddDPISchema_AddDuplicateDPIsByOtherCreatorsUseBoth_AcceptSchema(t *testing.T) { + const sameResourceNameOnBothDPI string = "users" + const validDPIUsedByBoth string = ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + ` + + const policyIDOfFirstCreatorsDPI string = "4f13c5084c3d0e1e5c5db702fceef84c3b6ab948949ca8e27fcaad3fb8bc39f4" + const policyIDOfSecondCreatorsDPI string = "d33aa07a28ea19ed07a5256eb7e7f5600b0e0af13254889a7fce60202c4f6c7e" + + test := testUtils.TestCase{ + + Description: "Test acp, add duplicate DPIs by different actors, accept both schemas", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: validDPIUsedByBoth, + + ExpectedPolicyID: policyIDOfFirstCreatorsDPI, + }, + + testUtils.AddPolicy{ + + Identity: actor2Identity, + + Policy: validDPIUsedByBoth, + + ExpectedPolicyID: policyIDOfSecondCreatorsDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type OldUsers @policy( + id: "%s", + resource: "%s" + ) { + name: String + age: Int + } + `, + policyIDOfFirstCreatorsDPI, + sameResourceNameOnBothDPI, + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "OldUsers") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "OldUsers", // NOTE: "OldUsers" MUST exist + "fields": schemaUtils.DefaultFields.Append( + schemaUtils.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Append( + schemaUtils.Field{ + "name": "age", + "type": map[string]any{ + "kind": "SCALAR", + "name": "Int", + }, + }, + ).Tidy(), + }, + }, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type NewUsers @policy( + id: "%s", + resource: "%s" + ) { + name: String + age: Int + } + `, + policyIDOfSecondCreatorsDPI, + sameResourceNameOnBothDPI, + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "NewUsers") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "NewUsers", // NOTE: "NewUsers" MUST exist + "fields": schemaUtils.DefaultFields.Append( + schemaUtils.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Append( + schemaUtils.Field{ + "name": "age", + "type": map[string]any{ + "kind": "SCALAR", + "name": "Int", + }, + }, + ).Tidy(), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/accept_multi_resources_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/accept_multi_resources_on_dpi_test.go new file mode 100644 index 0000000000..9903bc18d2 --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/accept_multi_resources_on_dpi_test.go @@ -0,0 +1,281 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + schemaUtils "github.com/sourcenetwork/defradb/tests/integration/schema" +) + +func TestACP_AddDPISchema_WithMultipleResources_AcceptSchema(t *testing.T) { + policyIDOfValidDPI := "f3e521de628fa607ba11af0e9b53e2fb74ca0e6ea33622003d1f43dbae0ce41d" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, with multiple resources, schema accepted", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + + books: + permissions: + read: + expr: owner + write: + expr: owner + + relations: + owner: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfValidDPI, + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "Users", // NOTE: "Users" MUST exist + "fields": schemaUtils.DefaultFields.Append( + schemaUtils.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Append( + schemaUtils.Field{ + "name": "age", + "type": map[string]any{ + "kind": "SCALAR", + "name": "Int", + }, + }, + ).Tidy(), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_WithMultipleResourcesBothBeingUsed_AcceptSchema(t *testing.T) { + policyIDOfValidDPI := "f3e521de628fa607ba11af0e9b53e2fb74ca0e6ea33622003d1f43dbae0ce41d" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, with multiple resources both being used, schemas accepted", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + + books: + permissions: + read: + expr: owner + write: + expr: owner + + relations: + owner: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfValidDPI, + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "Users", // NOTE: "Users" MUST exist + "fields": schemaUtils.DefaultFields.Append( + schemaUtils.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Append( + schemaUtils.Field{ + "name": "age", + "type": map[string]any{ + "kind": "SCALAR", + "name": 
"Int", + }, + }, + ).Tidy(), + }, + }, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Books @policy( + id: "%s", + resource: "books" + ) { + name: String + } + `, + policyIDOfValidDPI, + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Books") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "Books", // NOTE: "Books" MUST exist + "fields": schemaUtils.DefaultFields.Append( + schemaUtils.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Tidy(), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/accept_same_resource_on_diff_schemas_test.go b/tests/integration/acp/schema/add_dpi/accept_same_resource_on_diff_schemas_test.go new file mode 100644 index 0000000000..086a69a1b1 --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/accept_same_resource_on_diff_schemas_test.go @@ -0,0 +1,172 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + schemaUtils "github.com/sourcenetwork/defradb/tests/integration/schema" +) + +func TestACP_AddDPISchema_UseSameResourceOnDifferentSchemas_AcceptSchemas(t *testing.T) { + policyIDOfValidDPI := "4f13c5084c3d0e1e5c5db702fceef84c3b6ab948949ca8e27fcaad3fb8bc39f4" + sharedSameResourceName := "users" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, where one resource is specified on different schemas, schemas accepted", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type OldUsers @policy( + id: "%s", + resource: "%s" + ) { + name: String + age: Int + } + `, + policyIDOfValidDPI, + sharedSameResourceName, + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "OldUsers") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "OldUsers", // NOTE: "OldUsers" MUST exist + "fields": schemaUtils.DefaultFields.Append( + schemaUtils.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Append( + schemaUtils.Field{ + "name": "age", + "type": map[string]any{ + "kind": "SCALAR", + "name": "Int", + }, + }, + ).Tidy(), + }, + }, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type NewUsers @policy( + id: "%s", + resource: "%s" + ) { + name: String + age: Int + } + `, + policyIDOfValidDPI, + sharedSameResourceName, + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "NewUsers") { + name + fields { + name + type { + name + kind + } + } + 
} + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "NewUsers", // NOTE: "NewUsers" MUST exist + "fields": schemaUtils.DefaultFields.Append( + schemaUtils.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Append( + schemaUtils.Field{ + "name": "age", + "type": map[string]any{ + "kind": "SCALAR", + "name": "Int", + }, + }, + ).Tidy(), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/fixture.go b/tests/integration/acp/schema/add_dpi/fixture.go new file mode 100644 index 0000000000..3b3c83da6f --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/fixture.go @@ -0,0 +1,18 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + acpUtils "github.com/sourcenetwork/defradb/tests/integration/acp" +) + +var actor1Identity = acpUtils.Actor1Identity +var actor2Identity = acpUtils.Actor2Identity diff --git a/tests/integration/acp/schema/add_dpi/reject_empty_arg_on_schema_test.go b/tests/integration/acp/schema/add_dpi/reject_empty_arg_on_schema_test.go new file mode 100644 index 0000000000..b23f47e19b --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/reject_empty_arg_on_schema_test.go @@ -0,0 +1,165 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDPISchema_NoArgWasSpecifiedOnSchema_SchemaRejected(t *testing.T) { + policyIDOfValidDPI := "4f13c5084c3d0e1e5c5db702fceef84c3b6ab948949ca8e27fcaad3fb8bc39f4" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, but no arg was specified on schema, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy { + name: String + age: Int + } + `, + ExpectedError: "missing policy arguments, must have both id and resource", + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. 
+ }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_SpecifiedArgsAreEmptyOnSchema_SchemaRejected(t *testing.T) { + policyIDOfValidDPI := "4f13c5084c3d0e1e5c5db702fceef84c3b6ab948949ca8e27fcaad3fb8bc39f4" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, specified args on schema are empty, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy(resource: "", id: "") { + name: String + age: Int + } + `, + ExpectedError: "missing policy arguments, must have both id and resource", + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/reject_invalid_arg_type_on_schema_test.go b/tests/integration/acp/schema/add_dpi/reject_invalid_arg_type_on_schema_test.go new file mode 100644 index 0000000000..94b3fd2dde --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/reject_invalid_arg_type_on_schema_test.go @@ -0,0 +1,169 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDPISchema_InvalidPolicyIDArgTypeWasSpecifiedOnSchema_SchemaRejected(t *testing.T) { + policyIDOfValidDPI := "4f13c5084c3d0e1e5c5db702fceef84c3b6ab948949ca8e27fcaad3fb8bc39f4" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, but invalid policyID arg type was specified on schema, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy(id: 123 , resource: "users") { + name: String + age: Int + } + `, + ExpectedError: "policy directive with invalid id property", + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. 
+ }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_InvalidResourceArgTypeWasSpecifiedOnSchema_SchemaRejected(t *testing.T) { + policyIDOfValidDPI := "4f13c5084c3d0e1e5c5db702fceef84c3b6ab948949ca8e27fcaad3fb8bc39f4" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, but invalid resource arg type was specified on schema, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy(id: "%s" , resource: 123) { + name: String + age: Int + } + `, + policyIDOfValidDPI, + ), + + ExpectedError: "policy directive with invalid resource property", + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_on_dpi_test.go new file mode 100644 index 0000000000..540222d37b --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_on_dpi_test.go @@ -0,0 +1,438 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDPISchema_OwnerMissingRequiredReadPermissionOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "782ffee730033ff01a3bdb05a3aa130f08c0914887378b0dfee314be6c3a8dd0" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, with owner missing required read permission, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + write: + expr: owner + read: + expr: r + + relations: + owner: + types: + - actor + r: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission must start with required relation. Permission: %s, Relation: %s", + "read", + "owner", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. 
+ }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_OwnerMissingRequiredReadPermissionLabelOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "62d2d65d0304cb9a16bb4f07d1f48c7142911f73bc1db6ee54cdd2c6c7949c73" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, with owner missing required read permission label, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "resource is missing required permission on policy. PolicyID: %s, ResourceName: %s, Permission: %s", + policyIDOfInvalidDPI, + "users", + "read", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_OwnerSpecifiedIncorrectlyOnReadPermissionExprOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "f9fe33e8b2ee18a65d16bdc8017fe829ec13b0797330422639cd9dafac7b00f8" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, owner specified incorrectly on read permission expression, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: reader + owner + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission must start with required relation. Permission: %s, Relation: %s", + "read", + "owner", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. 
+ }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_OwnerSpecifiedIncorrectlyOnReadPermissionNoSpaceExprOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "08cc6bed6b9695dd47b6bf1e934ff91975db598631a55c26db9ead1393a77588" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, owner specified incorrectly on read permission expression (no space), reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: reader+owner + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission must start with required relation. Permission: %s, Relation: %s", + "read", + "owner", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_MaliciousOwnerSpecifiedOnReadPermissionExprOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "fff5c6fc25fbc2a9e5a7251c19b1cb950889281d656e5aeb642ce7c16f181c9b" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, malicious owner specified on read permission expression, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: ownerBad + write: + expr: owner + + relations: + owner: + types: + - actor + ownerBad: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission has invalid character after relation. Permission: %s, Relation: %s, Character: %s", + "read", + "owner", + "B", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_symbol_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_symbol_on_dpi_test.go new file mode 100644 index 0000000000..29ec5a9ecf --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_symbol_on_dpi_test.go @@ -0,0 +1,273 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDPISchema_OwnerRelationWithDifferenceSetOpOnReadPermissionExprOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "c9bb1811862ded3a4a8a931054bd99ecabde3f41231c6aa2c50e1f1f5af2b5e8" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, owner relation with difference (-) set operation on read permission expression, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner - reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission has invalid character after relation. Permission: %s, Relation: %s, Character: %s", + "read", + "owner", + "-", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_OwnerRelationWithIntersectionSetOpOnReadPermissionExprOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "7bff1d8a967df4de99f8daaa2567c660eb6e7b2c554c9a49bf831230e5d9eba6" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, owner relation with intersection (&) set operation on read permission expression, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner & reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission has invalid character after relation. Permission: %s, Relation: %s, Character: %s", + "read", + "owner", + "&", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. 
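+					// Difference ("-") and intersection ("&") are rejected after
+					// "owner", presumably because either could strip the owner of the
+					// required permission; only union ("+") is permitted.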
+ }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_OwnerRelationWithInvalidSetOpOnReadPermissionExprOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "cc2fab7c299e94e2bd9370708d26ca1262ff3b0d75f9a58d1086658cfec26c65" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, owner relation with invalid set operation on read permission expression, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner - owner + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission has invalid character after relation. Permission: %s, Relation: %s, Character: %s", + "read", + "owner", + "-", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_on_dpi_test.go new file mode 100644 index 0000000000..f3b5877444 --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_on_dpi_test.go @@ -0,0 +1,438 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDPISchema_OwnerMissingRequiredWritePermissionOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "4256d2b54767cafd0e0a2b39a6faebf44bc99a7fc74ff5b51894f7accf2ef638" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, with owner missing required write permission, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + write: + expr: w + read: + expr: owner + + relations: + owner: + types: + - actor + w: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission must start with required relation. 
Permission: %s, Relation: %s", + "write", + "owner", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_OwnerMissingRequiredWritePermissionLabelOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "e8be944571cd6b52faa1e8b75fa339a9f60065b65d78ed126d037722e2512593" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, with owner missing required write permission label, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "resource is missing required permission on policy. PolicyID: %s, ResourceName: %s, Permission: %s", + policyIDOfInvalidDPI, + "users", + "write", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_OwnerSpecifiedIncorrectlyOnWritePermissionExprOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "34ff30cb9e80993e2b11f86f85c6daa7cd9bf25724e4d5ff0704518d7970d074" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, owner specified incorrectly on write permission expression, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + write: + expr: writer + owner + + relations: + owner: + types: + - actor + writer: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission must start with required relation. Permission: %s, Relation: %s", + "write", + "owner", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. 
+ }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_OwnerSpecifiedIncorrectlyOnWritePermissionNoSpaceExprOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "2e9fc5805b0442e856e9893fea0f4759d333e442856a230ed741b88670e6426c" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, owner specified incorrectly on write permission expression (no space), reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + write: + expr: writer+owner + + relations: + owner: + types: + - actor + writer: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission must start with required relation. Permission: %s, Relation: %s", + "write", + "owner", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_MaliciousOwnerSpecifiedOnWritePermissionExprOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "3bcd650ac1e69d5efe6c930d05420231a0a69e6018d0f1015e0ecef9869d8dd5" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, malicious owner specified on write permission expression, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + write: + expr: ownerBad + + relations: + owner: + types: + - actor + ownerBad: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission has invalid character after relation. Permission: %s, Relation: %s, Character: %s", + "write", + "owner", + "B", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_symbol_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_symbol_on_dpi_test.go new file mode 100644 index 0000000000..96ff618123 --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_symbol_on_dpi_test.go @@ -0,0 +1,273 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDPISchema_OwnerRelationWithDifferenceSetOpOnWritePermissionExprOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "2e14b379df6008ba577a11ac47d59c09eb0146afc5453e1ac0f40178ac3f5720" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, owner relation with difference (-) set operation on write permission expression, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner - reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission has invalid character after relation. Permission: %s, Relation: %s, Character: %s", + "write", + "owner", + "-", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_OwnerRelationWithIntersectionSetOpOnWritePermissionExprOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "143546c4da209d67466690bf749899c37cd956f64c128ea7cca0662688f832ac" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, owner relation with intersection (&) set operation on write permission expression, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner & reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission has invalid character after relation. Permission: %s, Relation: %s, Character: %s", + "write", + "owner", + "&", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. 
+ }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_OwnerRelationWithInvalidSetOpOnWritePermissionExprOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "b9b4e941be904b0472ab6031628ce08ae4f87314e68972a6cfc114ed449820a4" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, owner relation with invalid set operation on write permission expression, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + write: + expr: owner - owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission has invalid character after relation. Permission: %s, Relation: %s, Character: %s", + "write", + "owner", + "-", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/reject_missing_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_missing_dpi_test.go new file mode 100644 index 0000000000..c59008edf5 --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/reject_missing_dpi_test.go @@ -0,0 +1,149 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDPISchema_WhereNoPolicyWasAdded_SchemaRejected(t *testing.T) { + nonExistingPolicyID := "dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, but no policy was added, reject schema", + + Actions: []any{ + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + nonExistingPolicyID, + ), + + ExpectedError: "policyID specified does not exist with acp", + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. 
+ }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_WhereAPolicyWasAddedButLinkedPolicyWasNotAdded_SchemaRejected(t *testing.T) { + policyAdded := "4f13c5084c3d0e1e5c5db702fceef84c3b6ab948949ca8e27fcaad3fb8bc39f4" + incorrectPolicyID := "dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, but specify incorrect policy ID, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyAdded, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + incorrectPolicyID, + ), + + ExpectedError: "policyID specified does not exist with acp", + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/reject_missing_id_arg_on_schema_test.go b/tests/integration/acp/schema/add_dpi/reject_missing_id_arg_on_schema_test.go new file mode 100644 index 0000000000..7a5942f2e6 --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/reject_missing_id_arg_on_schema_test.go @@ -0,0 +1,165 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDPISchema_NoPolicyIDWasSpecifiedOnSchema_SchemaRejected(t *testing.T) { + policyIDOfValidDPI := "4f13c5084c3d0e1e5c5db702fceef84c3b6ab948949ca8e27fcaad3fb8bc39f4" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, but no policyID was specified on schema, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy(resource: "users") { + name: String + age: Int + } + `, + ExpectedError: "policyID must not be empty", + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. 
+ }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_SpecifiedPolicyIDArgIsEmptyOnSchema_SchemaRejected(t *testing.T) { + policyIDOfValidDPI := "4f13c5084c3d0e1e5c5db702fceef84c3b6ab948949ca8e27fcaad3fb8bc39f4" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, specified policyID arg on schema is empty, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy(resource: "users", id: "") { + name: String + age: Int + } + `, + ExpectedError: "policyID must not be empty", + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/reject_missing_perms_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_missing_perms_on_dpi_test.go new file mode 100644 index 0000000000..16c6eb1024 --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/reject_missing_perms_on_dpi_test.go @@ -0,0 +1,97 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDPISchema_MissingRequiredReadPermissionOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "7eb7448daa631cfe33da3a149f5eea716026f54bf23ce1315c594259382c5c57" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, with missing required read permission, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: A policy + + actor: + name: actor + + resources: + users: + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "resource is missing required permission on policy. PolicyID: %s, ResourceName: %s, Permission: %s", + policyIDOfInvalidDPI, + "users", + "read", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. 
+ }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/reject_missing_resource_arg_on_schema_test.go b/tests/integration/acp/schema/add_dpi/reject_missing_resource_arg_on_schema_test.go new file mode 100644 index 0000000000..45635eae15 --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/reject_missing_resource_arg_on_schema_test.go @@ -0,0 +1,170 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDPISchema_NoResourceWasSpecifiedOnSchema_SchemaRejected(t *testing.T) { + policyIDOfValidDPI := "4f13c5084c3d0e1e5c5db702fceef84c3b6ab948949ca8e27fcaad3fb8bc39f4" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, but no resource was specified on schema, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy(id: "%s") { + name: String + age: Int + } + `, + policyIDOfValidDPI, + ), + ExpectedError: "resource name must not be empty", + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_SpecifiedResourceArgIsEmptyOnSchema_SchemaRejected(t *testing.T) { + policyIDOfValidDPI := "4f13c5084c3d0e1e5c5db702fceef84c3b6ab948949ca8e27fcaad3fb8bc39f4" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, specified resource arg on schema is empty, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy(id: "%s", resource: "") { + name: String + age: Int + } + `, + policyIDOfValidDPI, + ), + ExpectedError: "resource name must not be empty", + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. 
+ }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/reject_missing_resource_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_missing_resource_on_dpi_test.go new file mode 100644 index 0000000000..3d50f1c2a7 --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/reject_missing_resource_on_dpi_test.go @@ -0,0 +1,98 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDPISchema_SpecifiedResourceDoesNotExistOnDPI_SchemaRejected(t *testing.T) { + policyIDOfValidDPI := "4f13c5084c3d0e1e5c5db702fceef84c3b6ab948949ca8e27fcaad3fb8bc39f4" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, but specified resource does not exist on DPI, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "doesntExist" + ) { + name: String + age: Int + } + `, + policyIDOfValidDPI, + ), + + ExpectedError: "resource does not exist on the specified policy", + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/reject_mixed_resources_on_partial_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_mixed_resources_on_partial_dpi_test.go new file mode 100644 index 0000000000..ba9e06a2b6 --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/reject_mixed_resources_on_partial_dpi_test.go @@ -0,0 +1,117 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDPISchema_PartialValidDPIButUseInValidDPIResource_RejectSchema(t *testing.T) { + policyIDOfPartiallyValidDPI := "d5d411825b2d8fa5a550f1e34153b88b375ed9c9af19ce6d2ba1769e237a45d0" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, has both valid & invalid resources, but use invalid resource, schema rejected", + + Actions: []any{ + + testUtils.AddPolicy{ + + Identity: actor1Identity, + + Policy: ` + description: A Partially Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + usersValid: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + + usersInvalid: + permissions: + read: + expr: reader - owner + write: + expr: reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfPartiallyValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "usersInvalid" + ) { + name: String + age: Int + } + `, + policyIDOfPartiallyValidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission must start with required relation. Permission: %s, Relation: %s", + "read", + "owner", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/backup/one_to_one/export_test.go b/tests/integration/backup/one_to_one/export_test.go index 4ae32cbebc..b52e0bb02f 100644 --- a/tests/integration/backup/one_to_one/export_test.go +++ b/tests/integration/backup/one_to_one/export_test.go @@ -78,8 +78,8 @@ func TestBackupExport_DoubleReletionship_NoError(t *testing.T) { } type Book { name: String - author: User @relation(name: "written_books") - favourite: User @relation(name: "favourite_books") + author: User @relation(name: "written_books") @primary + favourite: User @relation(name: "favourite_books") @primary } `, }, @@ -122,8 +122,8 @@ func TestBackupExport_DoubleReletionshipWithUpdate_NoError(t *testing.T) { } type Book { name: String - author: User @relation(name: "written_books") - favourite: User @relation(name: "favourite_books") + author: User @relation(name: "written_books") @primary + favourite: User @relation(name: "favourite_books") @primary } `, }, diff --git a/tests/integration/backup/one_to_one/import_test.go b/tests/integration/backup/one_to_one/import_test.go index 5405dd4225..8c3aff4fe2 100644 --- a/tests/integration/backup/one_to_one/import_test.go +++ b/tests/integration/backup/one_to_one/import_test.go @@ -205,8 +205,8 @@ func TestBackupImport_DoubleRelationshipWithUpdate_NoError(t *testing.T) { } type Book { name: String - author: User @relation(name: "written_books") - favourite: User @relation(name: "favourite_books") + author: User @relation(name: "written_books") @primary + favourite: User @relation(name: "favourite_books") @primary } `, }, @@ -237,7 +237,8 @@ func TestBackupImport_DoubleRelationshipWithUpdate_NoError(t *testing.T) { }, }, { - "name": "Game of chains", + "name": "Game of chains", + "author": nil, }, }, }, diff --git a/tests/integration/collection/update/simple/with_doc_id_test.go 
b/tests/integration/collection/update/simple/with_doc_id_test.go deleted file mode 100644 index 6f990f7e70..0000000000 --- a/tests/integration/collection/update/simple/with_doc_id_test.go +++ /dev/null @@ -1,141 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package update - -import ( - "context" - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/sourcenetwork/defradb/client" - testUtils "github.com/sourcenetwork/defradb/tests/integration/collection" -) - -func TestUpdateWithDocID(t *testing.T) { - docStr := `{ - "name": "John", - "age": 21 - }` - - doc, err := client.NewDocFromJSON([]byte(docStr), colDefMap["Users"].Schema) - if err != nil { - assert.Fail(t, err.Error()) - } - - tests := []testUtils.TestCase{ - { - Description: "Test update users with docID and invalid JSON", - Docs: map[string][]string{ - "Users": {docStr}, - }, - CollectionCalls: map[string][]func(client.Collection) error{ - "Users": []func(c client.Collection) error{ - func(c client.Collection) error { - ctx := context.Background() - _, err := c.UpdateWithDocID(ctx, doc.ID(), `{ - name: "Eric" - }`) - return err - }, - }, - }, - ExpectedError: "cannot parse JSON: cannot parse object", - }, { - Description: "Test update users with docID and invalid updator", - Docs: map[string][]string{ - "Users": {docStr}, - }, - CollectionCalls: map[string][]func(client.Collection) error{ - "Users": []func(c client.Collection) error{ - func(c client.Collection) error { - ctx := context.Background() - _, err := c.UpdateWithDocID(ctx, doc.ID(), `"name: Eric"`) - return err - }, - }, - }, - ExpectedError: "the updater of a document is of invalid type", - }, { - Description: "Test update users with docID and patch updator (not implemented so no change)", - Docs: map[string][]string{ - "Users": {docStr}, - }, - CollectionCalls: map[string][]func(client.Collection) error{ - "Users": []func(c client.Collection) error{ - func(c client.Collection) error { - ctx := context.Background() - _, err := c.UpdateWithDocID(ctx, doc.ID(), `[ - { - "name": "Eric" - }, { - "name": "Sam" - } - ]`) - if err != nil { - return err - } - - d, err := c.Get(ctx, doc.ID(), false) - if err != nil { - return err - } - - name, err := d.Get("name") - if err != nil { - return err - } - - assert.Equal(t, "John", name) - - return nil - }, - }, - }, - }, { - Description: "Test update users with docID", - Docs: map[string][]string{ - "Users": {docStr}, - }, - CollectionCalls: map[string][]func(client.Collection) error{ - "Users": []func(c client.Collection) error{ - func(c client.Collection) error { - ctx := context.Background() - _, err := c.UpdateWithDocID(ctx, doc.ID(), `{ - "name": "Eric" - }`) - if err != nil { - return err - } - - d, err := c.Get(ctx, doc.ID(), false) - if err != nil { - return err - } - - name, err := d.Get("name") - if err != nil { - return err - } - - assert.Equal(t, "Eric", name) - - return nil - }, - }, - }, - }, - } - - for _, test := range tests { - executeTestCase(t, test) - } -} diff --git a/tests/integration/collection/update/simple/with_doc_ids_test.go b/tests/integration/collection/update/simple/with_doc_ids_test.go deleted file mode 100644 index a78fa2cc29..0000000000 --- 
a/tests/integration/collection/update/simple/with_doc_ids_test.go +++ /dev/null @@ -1,187 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package update - -import ( - "context" - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/sourcenetwork/defradb/client" - testUtils "github.com/sourcenetwork/defradb/tests/integration/collection" -) - -func TestUpdateWithDocIDs(t *testing.T) { - docStr1 := `{ - "name": "John", - "age": 21 - }` - - doc1, err := client.NewDocFromJSON([]byte(docStr1), colDefMap["Users"].Schema) - if err != nil { - assert.Fail(t, err.Error()) - } - - docStr2 := `{ - "name": "Sam", - "age": 32 - }` - - doc2, err := client.NewDocFromJSON([]byte(docStr2), colDefMap["Users"].Schema) - if err != nil { - assert.Fail(t, err.Error()) - } - - tests := []testUtils.TestCase{ - { - Description: "Test update users with docIDs and invalid JSON", - Docs: map[string][]string{ - "Users": { - docStr1, - docStr2, - }, - }, - CollectionCalls: map[string][]func(client.Collection) error{ - "Users": []func(c client.Collection) error{ - func(c client.Collection) error { - ctx := context.Background() - _, err := c.UpdateWithDocIDs(ctx, []client.DocID{doc1.ID(), doc2.ID()}, `{ - name: "Eric" - }`) - return err - }, - }, - }, - ExpectedError: "cannot parse JSON: cannot parse object", - }, { - Description: "Test update users with docIDs and invalid updator", - Docs: map[string][]string{ - "Users": { - docStr1, - docStr2, - }, - }, - CollectionCalls: map[string][]func(client.Collection) error{ - "Users": []func(c client.Collection) error{ - func(c client.Collection) error { - ctx := context.Background() - _, err := c.UpdateWithDocIDs(ctx, []client.DocID{doc1.ID(), doc2.ID()}, `"name: Eric"`) - return err - }, - }, - }, - ExpectedError: "the updater of a document is of invalid type", - }, { - Description: "Test update users with docIDs and patch updator (not implemented so no change)", - Docs: map[string][]string{ - "Users": { - docStr1, - docStr2, - }, - }, - CollectionCalls: map[string][]func(client.Collection) error{ - "Users": []func(c client.Collection) error{ - func(c client.Collection) error { - ctx := context.Background() - _, err := c.UpdateWithDocIDs(ctx, []client.DocID{doc1.ID(), doc2.ID()}, `[ - { - "name": "Eric" - }, { - "name": "Bob" - } - ]`) - if err != nil { - return err - } - - d, err := c.Get(ctx, doc1.ID(), false) - if err != nil { - return err - } - - name, err := d.Get("name") - if err != nil { - return err - } - - assert.Equal(t, "John", name) - - d2, err := c.Get(ctx, doc2.ID(), false) - if err != nil { - return err - } - - name2, err := d2.Get("name") - if err != nil { - return err - } - - assert.Equal(t, "Sam", name2) - - return nil - }, - }, - }, - }, { - Description: "Test update users with docIDs", - Docs: map[string][]string{ - "Users": { - docStr1, - docStr2, - }, - }, - CollectionCalls: map[string][]func(client.Collection) error{ - "Users": []func(c client.Collection) error{ - func(c client.Collection) error { - ctx := context.Background() - _, err := c.UpdateWithDocIDs(ctx, []client.DocID{doc1.ID(), doc2.ID()}, `{ - "age": 40 - }`) - if err != nil { - return err - } - - d, err := c.Get(ctx, doc1.ID(), 
false) - if err != nil { - return err - } - - name, err := d.Get("age") - if err != nil { - return err - } - - assert.Equal(t, int64(40), name) - - d2, err := c.Get(ctx, doc2.ID(), false) - if err != nil { - return err - } - - name2, err := d2.Get("age") - if err != nil { - return err - } - - assert.Equal(t, int64(40), name2) - - return nil - }, - }, - }, - }, - } - - for _, test := range tests { - executeTestCase(t, test) - } -} diff --git a/tests/integration/collection/update/simple/with_filter_test.go b/tests/integration/collection/update/simple/with_filter_test.go index 1dc10b8de8..ebe45e0b4f 100644 --- a/tests/integration/collection/update/simple/with_filter_test.go +++ b/tests/integration/collection/update/simple/with_filter_test.go @@ -29,9 +29,11 @@ func TestUpdateWithInvalidFilterType(t *testing.T) { func(c client.Collection) error { ctx := context.Background() // test with an invalid filter type - _, err := c.UpdateWithFilter(ctx, t, `{ - "name": "Eric" - }`) + _, err := c.UpdateWithFilter( + ctx, + t, + `{"name": "Eric"}`, + ) return err }, }, @@ -51,9 +53,11 @@ func TestUpdateWithEmptyFilter(t *testing.T) { func(c client.Collection) error { ctx := context.Background() // test with an empty filter - _, err := c.UpdateWithFilter(ctx, "", `{ - "name": "Eric" - }`) + _, err := c.UpdateWithFilter( + ctx, + "", + `{"name": "Eric"}`, + ) return err }, }, @@ -70,7 +74,7 @@ func TestUpdateWithFilter(t *testing.T) { "age": 21 }` - doc, err := client.NewDocFromJSON([]byte(docStr), colDefMap["Users"].Schema) + doc, err := client.NewDocFromJSON([]byte(docStr), colDefMap["Users"]) if err != nil { assert.Fail(t, err.Error()) } @@ -87,9 +91,11 @@ func TestUpdateWithFilter(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithFilter(ctx, filter, `{ - name: "Eric" - }`) + _, err := c.UpdateWithFilter( + ctx, + filter, + `{name: "Eric"}`, + ) return err }, }, @@ -104,7 +110,11 @@ func TestUpdateWithFilter(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithFilter(ctx, filter, `"name: Eric"`) + _, err := c.UpdateWithFilter( + ctx, + filter, + `"name: Eric"`, + ) return err }, }, @@ -119,13 +129,17 @@ func TestUpdateWithFilter(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithFilter(ctx, filter, `[ - { - "name": "Eric" - }, { - "name": "Sam" - } - ]`) + _, err := c.UpdateWithFilter( + ctx, + filter, + `[ + { + "name": "Eric" + }, { + "name": "Sam" + } + ]`, + ) if err != nil { return err } @@ -155,9 +169,11 @@ func TestUpdateWithFilter(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithFilter(ctx, filter, `{ - "name": "Eric" - }`) + _, err := c.UpdateWithFilter( + ctx, + filter, + `{"name": "Eric"}`, + ) if err != nil { return err } diff --git a/tests/integration/collection/utils.go b/tests/integration/collection/utils.go index b8bf1cf46b..497637a5c3 100644 --- a/tests/integration/collection/utils.go +++ b/tests/integration/collection/utils.go @@ -86,7 +86,7 @@ func setupDatabase( } for _, docStr := range docs { - doc, err := client.NewDocFromJSON([]byte(docStr), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(docStr), col.Definition()) if assertError(t, testCase.Description, err, testCase.ExpectedError) { return } diff 
--git a/tests/integration/collection_description/simple_test.go b/tests/integration/collection_description/simple_test.go new file mode 100644 index 0000000000..1070e8cd99 --- /dev/null +++ b/tests/integration/collection_description/simple_test.go @@ -0,0 +1,42 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package collection_description + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + "github.com/sourcenetwork/defradb/client" + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrSimpleCreatesColGivenEmptyType(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.GetCollections{ + ExpectedResults: []client.CollectionDescription{ + { + ID: 1, + Name: immutable.Some("Users"), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/add/collections_test.go b/tests/integration/collection_description/updates/add/collections_test.go new file mode 100644 index 0000000000..9193b57dc6 --- /dev/null +++ b/tests/integration/collection_description/updates/add/collections_test.go @@ -0,0 +1,107 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package add + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateAddCollections_WithUndefinedID_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "add", "path": "/2", "value": {"Name": "Dogs"} } + ] + `, + ExpectedError: "collection ID cannot be zero", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateAddCollections_WithZeroedID_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "add", "path": "/2", "value": {"ID": 0, "Name": "Dogs"} } + ] + `, + ExpectedError: "collection ID cannot be zero", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateAddCollections_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "add", "path": "/2", "value": {"ID": 2, "Name": "Dogs"} } + ] + `, + ExpectedError: "adding collections via patch is not supported. 
ID: 2", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateAddCollections_WithNoIndex_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "add", "path": "/-", "value": {"Name": "Dogs"} } + ] + `, + // We get this error because we are marshalling into a map[uint32]CollectionDescription, + // we will need to handle `-` when we allow adding collections via patches. + ExpectedError: "json: cannot unmarshal number - into Go value of type uint32", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/add/sources_test.go b/tests/integration/collection_description/updates/add/sources_test.go new file mode 100644 index 0000000000..c58ff4a660 --- /dev/null +++ b/tests/integration/collection_description/updates/add/sources_test.go @@ -0,0 +1,39 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package add + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateAddSources_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "add", "path": "/1/Sources/-", "value": {"SourceCollectionID": 1} } + ] + `, + ExpectedError: "collection sources cannot be added or removed. CollectionID: 1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/copy/name_test.go b/tests/integration/collection_description/updates/copy/name_test.go new file mode 100644 index 0000000000..b915d111ac --- /dev/null +++ b/tests/integration/collection_description/updates/copy/name_test.go @@ -0,0 +1,98 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package copy + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateCopyName_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "name", "Kind": "String"} } + ] + `, + SetAsDefaultVersion: immutable.Some(false), + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "copy", "from": "/1/Name", "path": "/2/Name" } + ] + `, + ExpectedError: "collection already exists. 
Name: Users", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateCopyName(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "name", "Kind": "String"} } + ] + `, + SetAsDefaultVersion: immutable.Some(false), + }, + testUtils.PatchCollection{ + // Activate the second collection by setting its name to that of the first, + // then decativate the original collection version by removing the name + Patch: ` + [ + { "op": "copy", "from": "/1/Name", "path": "/2/Name" }, + { "op": "remove", "path": "/1/Name" } + ] + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John" + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + } + }`, + Results: []map[string]any{ + { + "name": "John", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/move/name_test.go b/tests/integration/collection_description/updates/move/name_test.go new file mode 100644 index 0000000000..f493b03c1a --- /dev/null +++ b/tests/integration/collection_description/updates/move/name_test.go @@ -0,0 +1,66 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package move + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateMoveName(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "name", "Kind": "String"} } + ] + `, + SetAsDefaultVersion: immutable.Some(false), + }, + testUtils.PatchCollection{ + // Make the second collection the active one by moving its name from the first to the second + Patch: ` + [ + { "op": "move", "from": "/1/Name", "path": "/2/Name" } + ] + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John" + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + } + }`, + Results: []map[string]any{ + { + "name": "John", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/remove/col_source_transform_test.go b/tests/integration/collection_description/updates/remove/col_source_transform_test.go new file mode 100644 index 0000000000..73179c16b0 --- /dev/null +++ b/tests/integration/collection_description/updates/remove/col_source_transform_test.go @@ -0,0 +1,80 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package remove + +import ( + "testing" + + "github.com/lens-vm/lens/host-go/config/model" + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + "github.com/sourcenetwork/defradb/tests/lenses" +) + +func TestColDescrUpdateRemoveCollectionSourceTransform(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad" + }`, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "email", "Kind": 11} } + ] + `, + Lens: immutable.Some(model.Lens{ + Lenses: []model.LensModule{ + { + Path: lenses.SetDefaultModulePath, + Arguments: map[string]any{ + "dst": "name", + "value": "Fred", + }, + }, + }, + }), + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "remove", "path": "/2/Sources/0/Transform" } + ] + `, + }, + testUtils.Request{ + Request: `query { + Users { + name + } + }`, + // If the transform was not removed, `"Fred"` would have been returned + Results: []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/remove/collections_test.go b/tests/integration/collection_description/updates/remove/collections_test.go new file mode 100644 index 0000000000..b9363bde66 --- /dev/null +++ b/tests/integration/collection_description/updates/remove/collections_test.go @@ -0,0 +1,41 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package remove + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateRemoveCollections(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "remove", "path": "/1" } + ] + `, + ExpectedError: `collections cannot be deleted. CollectionID: 1`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/remove/name_test.go b/tests/integration/collection_description/updates/remove/name_test.go new file mode 100644 index 0000000000..e352491cd7 --- /dev/null +++ b/tests/integration/collection_description/updates/remove/name_test.go @@ -0,0 +1,49 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package remove + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateRemoveName(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "remove", "path": "/1/Name" } + ] + `, + }, + testUtils.Request{ + Request: `query { + Users { + name + } + }`, + // The Users collection has been deactivated and is no longer accessible + ExpectedError: `Cannot query field "Users" on type "Query".`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/remove/policy_test.go b/tests/integration/collection_description/updates/remove/policy_test.go new file mode 100644 index 0000000000..1d4de5d399 --- /dev/null +++ b/tests/integration/collection_description/updates/remove/policy_test.go @@ -0,0 +1,82 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package remove + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + acpUtils "github.com/sourcenetwork/defradb/tests/integration/acp" +) + +func TestColDescrUpdateRemovePolicy_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.AddPolicy{ + + Identity: acpUtils.Actor1Identity, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "remove", "path": "/1/Policy" } + ] + `, + ExpectedError: "collection policy cannot be mutated. CollectionID: 1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/replace/col_source_source_id_test.go b/tests/integration/collection_description/updates/replace/col_source_source_id_test.go new file mode 100644 index 0000000000..3ca1a749f6 --- /dev/null +++ b/tests/integration/collection_description/updates/replace/col_source_source_id_test.go @@ -0,0 +1,53 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package replace + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateReplaceCollectionSourceSourceCollectionID_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad" + }`, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "email", "Kind": 11} } + ] + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/2/Sources/0/SourceCollectionID", "value": 3 } + ] + `, + ExpectedError: "collection source ID cannot be mutated. CollectionID: 2, NewCollectionSourceID: 3, OldCollectionSourceID: 1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/replace/col_source_transform_test.go b/tests/integration/collection_description/updates/replace/col_source_transform_test.go new file mode 100644 index 0000000000..b933dcd2ed --- /dev/null +++ b/tests/integration/collection_description/updates/replace/col_source_transform_test.go @@ -0,0 +1,88 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package replace + +import ( + "encoding/json" + "fmt" + "testing" + + "github.com/lens-vm/lens/host-go/config/model" + "github.com/stretchr/testify/require" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + "github.com/sourcenetwork/defradb/tests/lenses" +) + +func TestColDescrUpdateReplaceCollectionSourceTransform(t *testing.T) { + transformCfgJson, err := json.Marshal( + model.Lens{ + Lenses: []model.LensModule{ + { + Path: lenses.SetDefaultModulePath, + Arguments: map[string]any{ + "dst": "name", + "value": "Fred", + }, + }, + }, + }, + ) + require.NoError(t, err) + + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad" + }`, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "email", "Kind": 11} } + ] + `, + }, + testUtils.PatchCollection{ + Patch: fmt.Sprintf(` + [ + { "op": "replace", "path": "/2/Sources/0/Transform", "value": %s } + ] + `, + transformCfgJson, + ), + }, + testUtils.Request{ + Request: `query { + Users { + name + } + }`, + // Without the new transform, `"Shahzad"` would have been returned + Results: []map[string]any{ + { + "name": "Fred", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/replace/fields_test.go b/tests/integration/collection_description/updates/replace/fields_test.go new file mode 100644 index 0000000000..03aa8cdb1e --- /dev/null +++ b/tests/integration/collection_description/updates/replace/fields_test.go @@ -0,0 +1,39 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package replace + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateReplaceFields_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/Fields", "value": [{}] } + ] + `, + ExpectedError: "collection fields cannot be mutated. CollectionID: 1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/replace/id_test.go b/tests/integration/collection_description/updates/replace/id_test.go new file mode 100644 index 0000000000..a89dad193b --- /dev/null +++ b/tests/integration/collection_description/updates/replace/id_test.go @@ -0,0 +1,146 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package replace + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateReplaceID_WithZero_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/ID", "value": 0 } + ] + `, + ExpectedError: "collection ID cannot be zero", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateReplaceID_WithExisting_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.SchemaUpdate{ + Schema: ` + type Books {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/ID", "value": 2 } + ] + `, + ExpectedError: "collection already exists. 
ID: 2", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateReplaceID_WithExistingSameRoot_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "name", "Kind": "String"} } + ] + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/ID", "value": 2 }, + { "op": "replace", "path": "/2/ID", "value": 1 } + ] + `, + ExpectedError: "collection sources cannot be added or removed.", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateReplaceID_WithExistingDifferentRoot_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.SchemaUpdate{ + Schema: ` + type Dogs {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/ID", "value": 2 }, + { "op": "replace", "path": "/2/ID", "value": 1 } + ] + `, + ExpectedError: "collection root ID cannot be mutated.", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateReplaceID_WithNew_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/ID", "value": 2 } + ] + `, + ExpectedError: "adding collections via patch is not supported. ID: 2", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/replace/indexes_test.go b/tests/integration/collection_description/updates/replace/indexes_test.go new file mode 100644 index 0000000000..9302d1f192 --- /dev/null +++ b/tests/integration/collection_description/updates/replace/indexes_test.go @@ -0,0 +1,39 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package replace + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateReplaceIndexes_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/Indexes", "value": [{}] } + ] + `, + ExpectedError: "collection indexes cannot be mutated. CollectionID: 1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/replace/name_test.go b/tests/integration/collection_description/updates/replace/name_test.go new file mode 100644 index 0000000000..98f1ba8c98 --- /dev/null +++ b/tests/integration/collection_description/updates/replace/name_test.go @@ -0,0 +1,210 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package replace
+
+import (
+	"testing"
+
+	"github.com/sourcenetwork/immutable"
+
+	"github.com/sourcenetwork/defradb/client"
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestColDescrUpdateReplaceName_GivenExistingName(t *testing.T) {
+	test := testUtils.TestCase{
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type Users {
+						name: String
+					}
+				`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "John"
+				}`,
+			},
+			testUtils.PatchCollection{
+				Patch: `
+					[
+						{ "op": "replace", "path": "/1/Name", "value": "Actors" }
+					]
+				`,
+			},
+			testUtils.GetCollections{
+				ExpectedResults: []client.CollectionDescription{
+					{
+						ID:   1,
+						Name: immutable.Some("Actors"),
+					},
+				},
+			},
+			testUtils.Request{
+				Request: `query {
+					Users {
+						name
+					}
+				}`,
+				ExpectedError: `Cannot query field "Users" on type "Query".`,
+			},
+			testUtils.Request{
+				Request: `query {
+					Actors {
+						name
+					}
+				}`,
+				Results: []map[string]any{
+					{
+						"name": "John",
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestColDescrUpdateReplaceName_GivenInactiveCollectionWithSameName_Errors(t *testing.T) {
+	test := testUtils.TestCase{
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type Users {
+						name: String
+					}
+				`,
+			},
+			testUtils.SchemaPatch{
+				Patch: `
+					[
+						{ "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": "String"} }
+					]
+				`,
+				SetAsDefaultVersion: immutable.Some(false),
+			},
+			testUtils.PatchCollection{
+				Patch: `
+					[
+						{ "op": "replace", "path": "/2/Name", "value": "Users" }
+					]
+				`,
+				ExpectedError: "collection already exists. Name: Users",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestColDescrUpdateReplaceName_GivenInactiveCollection_Errors(t *testing.T) {
+	test := testUtils.TestCase{
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type Users {
+						name: String
+					}
+				`,
+			},
+			testUtils.SchemaPatch{
+				Patch: `
+					[
+						{ "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": "String"} }
+					]
+				`,
+				SetAsDefaultVersion: immutable.Some(false),
+			},
+			testUtils.PatchCollection{
+				Patch: `
+					[
+						{ "op": "replace", "path": "/2/Name", "value": "Actors" }
+					]
+				`,
+				// The params at the end of the error message are dependent on the order in which Go decides to iterate through
+ ExpectedError: "multiple versions of same collection cannot be active", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateReplaceName_RemoveExistingName(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John" + }`, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": "String"} } + ] + `, + SetAsDefaultVersion: immutable.Some(false), + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "remove", "path": "/1/Name" }, + { "op": "replace", "path": "/2/Name", "value": "Actors" } + ] + `, + }, + testUtils.GetCollections{ + FilterOptions: client.CollectionFetchOptions{ + IncludeInactive: immutable.Some(true), + }, + ExpectedResults: []client.CollectionDescription{ + { + ID: 1, + }, + { + ID: 2, + Name: immutable.Some("Actors"), + Sources: []any{ + &client.CollectionSource{ + SourceCollectionID: 1, + }, + }, + }, + }, + }, + testUtils.Request{ + Request: `query { + Actors { + name + } + }`, + Results: []map[string]any{ + { + "name": "John", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/replace/policy_test.go b/tests/integration/collection_description/updates/replace/policy_test.go new file mode 100644 index 0000000000..f71b652c59 --- /dev/null +++ b/tests/integration/collection_description/updates/replace/policy_test.go @@ -0,0 +1,83 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package replace + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateReplacePolicy_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/Policy", "value": {} } + ] + `, + ExpectedError: "collection policy cannot be mutated. CollectionID: 1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateReplacePolicyID_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/Policy", "value": {"ID": "dfe202ffb4f0fe9b46157c313213a383"} } + ] + `, + ExpectedError: "collection policy cannot be mutated. CollectionID: 1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateReplacePolicyResource_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/Policy", "value": {"ResourceName": "mutatingResource"} } + ] + `, + ExpectedError: "collection policy cannot be mutated. 
CollectionID: 1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/replace/query_source_query_test.go b/tests/integration/collection_description/updates/replace/query_source_query_test.go new file mode 100644 index 0000000000..789f4b2d7b --- /dev/null +++ b/tests/integration/collection_description/updates/replace/query_source_query_test.go @@ -0,0 +1,141 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package replace + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateReplaceQuerySourceQuery(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaUpdate{ + Schema: ` + type Books { + name: String + } + `, + }, + testUtils.CreateView{ + // Create the view on the `Books` collection + Query: ` + Books { + name + } + `, + SDL: ` + type View { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad" + }`, + }, + testUtils.PatchCollection{ + // Patch the view query definition so that it now queries the `Users` collection + Patch: ` + [ + { "op": "replace", "path": "/3/Sources/0/Query", "value": {"Name": "Users", "Fields":[{"Name":"name"}]} } + ] + `, + }, + testUtils.Request{ + Request: `query { + View { + name + } + }`, + // If the view was still querying `Books` there would be no results + Results: []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateReplaceQuerySourceQueryName(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaUpdate{ + Schema: ` + type Books { + name: String + } + `, + }, + testUtils.CreateView{ + // Create the view on the `Books` collection + Query: ` + Books { + name + } + `, + SDL: ` + type View { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad" + }`, + }, + testUtils.PatchCollection{ + // Patch the view query definition so that it now queries the `Users` collection + Patch: ` + [ + { "op": "replace", "path": "/3/Sources/0/Query/Name", "value": "Users" } + ] + `, + }, + testUtils.Request{ + Request: `query { + View { + name + } + }`, + // If the view was still querying `Books` there would be no results + Results: []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/replace/query_source_transform_test.go b/tests/integration/collection_description/updates/replace/query_source_transform_test.go new file mode 100644 index 0000000000..89a2598010 --- /dev/null +++ b/tests/integration/collection_description/updates/replace/query_source_transform_test.go @@ -0,0 +1,113 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package replace
+
+import (
+	"encoding/json"
+	"fmt"
+	"testing"
+
+	"github.com/lens-vm/lens/host-go/config/model"
+	"github.com/sourcenetwork/immutable"
+	"github.com/stretchr/testify/require"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+	"github.com/sourcenetwork/defradb/tests/lenses"
+)
+
+func TestColDescrUpdateReplaceQuerySourceTransform(t *testing.T) {
+	newTransformCfgJson, err := json.Marshal(
+		model.Lens{
+			Lenses: []model.LensModule{
+				{
+					Path: lenses.CopyModulePath,
+					Arguments: map[string]any{
+						"src": "lastName",
+						"dst": "fullName",
+					},
+				},
+			},
+		},
+	)
+	require.NoError(t, err)
+
+	test := testUtils.TestCase{
+		Description: "Simple view with transform",
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type User {
+						firstName: String
+						lastName: String
+					}
+				`,
+			},
+			testUtils.CreateView{
+				Query: `
+					User {
+						firstName
+						lastName
+					}
+				`,
+				SDL: `
+					type UserView {
+						fullName: String
+					}
+				`,
+				Transform: immutable.Some(model.Lens{
+					// This transform will copy the value from `firstName` into the `fullName` field,
+					// like an overly-complicated alias
+					Lenses: []model.LensModule{
+						{
+							Path: lenses.CopyModulePath,
+							Arguments: map[string]any{
+								"src": "firstName",
+								"dst": "fullName",
+							},
+						},
+					},
+				}),
+			},
+			testUtils.PatchCollection{
+				Patch: fmt.Sprintf(`
+					[
+						{ "op": "replace", "path": "/2/Sources/0/Transform", "value": %s }
+					]
+				`,
+					newTransformCfgJson,
+				),
+			},
+			testUtils.CreateDoc{
+				// Create a document with both `firstName` and `lastName` set
+				Doc: `{
+					"firstName": "John",
+					"lastName": "S"
+				}`,
+			},
+			testUtils.Request{
+				Request: `
+					query {
+						UserView {
+							fullName
+						}
+					}
+				`,
+				Results: []map[string]any{
+					{
+						"fullName": "S",
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/collection_description/updates/replace/root_id_test.go b/tests/integration/collection_description/updates/replace/root_id_test.go
new file mode 100644
index 0000000000..fee98f0664
--- /dev/null
+++ b/tests/integration/collection_description/updates/replace/root_id_test.go
@@ -0,0 +1,39 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package replace
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestColDescrUpdateReplaceRootID_Errors(t *testing.T) {
+	test := testUtils.TestCase{
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type Users {}
+				`,
+			},
+			testUtils.PatchCollection{
+				Patch: `
+					[
+						{ "op": "replace", "path": "/1/RootID", "value": 2 }
+					]
+				`,
+				ExpectedError: "collection root ID cannot be mutated.
CollectionID: 1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/replace/schema_version_id_test.go b/tests/integration/collection_description/updates/replace/schema_version_id_test.go new file mode 100644 index 0000000000..e4b1e7f42c --- /dev/null +++ b/tests/integration/collection_description/updates/replace/schema_version_id_test.go @@ -0,0 +1,39 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package replace + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateReplaceSchemaVersionID_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/SchemaVersionID", "value": "ghfdsas" } + ] + `, + ExpectedError: "collection schema version ID cannot be mutated. CollectionID: 1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/replace/sources_test.go b/tests/integration/collection_description/updates/replace/sources_test.go new file mode 100644 index 0000000000..2f6bf7ca69 --- /dev/null +++ b/tests/integration/collection_description/updates/replace/sources_test.go @@ -0,0 +1,61 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package replace + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateReplaceSources_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/Sources", "value": [{"SourceCollectionID": 1}] } + ] + `, + ExpectedError: "collection sources cannot be added or removed. CollectionID: 1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateReplaceSourcesWithQuerySource_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/Sources", "value": [{"Query": {"Name": "Users"}}] } + ] + `, + ExpectedError: "collection sources cannot be added or removed. 
CollectionID: 1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/test/name_test.go b/tests/integration/collection_description/updates/test/name_test.go new file mode 100644 index 0000000000..7baa13aca1 --- /dev/null +++ b/tests/integration/collection_description/updates/test/name_test.go @@ -0,0 +1,60 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateTestName(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "test", "path": "/1/Name", "value": "Users" } + ] + `, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateTestName_Fails(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "test", "path": "/1/Name", "value": "Dogs" } + ] + `, + ExpectedError: "testing value /1/Name failed: test failed", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/db.go b/tests/integration/db.go index b103f656b3..73d8818934 100644 --- a/tests/integration/db.go +++ b/tests/integration/db.go @@ -76,6 +76,7 @@ func NewBadgerMemoryDB(ctx context.Context, dbopts ...db.Option) (client.DB, err if err != nil { return nil, err } + dbopts = append(dbopts, db.WithACPInMemory()) db, err := db.NewDB(ctx, rootstore, dbopts...) if err != nil { return nil, err @@ -84,6 +85,7 @@ func NewBadgerMemoryDB(ctx context.Context, dbopts ...db.Option) (client.DB, err } func NewInMemoryDB(ctx context.Context, dbopts ...db.Option) (client.DB, error) { + dbopts = append(dbopts, db.WithACPInMemory()) db, err := db.NewDB(ctx, memory.NewDatastore(ctx), dbopts...) if err != nil { return nil, err @@ -110,14 +112,18 @@ func NewBadgerFileDB(ctx context.Context, t testing.TB, dbopts ...db.Option) (cl opts := &badgerds.Options{ Options: badger.DefaultOptions(dbPath), } + rootstore, err := badgerds.NewDatastore(dbPath, opts) if err != nil { return nil, "", err } + + dbopts = append(dbopts, db.WithACP(dbPath)) db, err := db.NewDB(ctx, rootstore, dbopts...) 
if err != nil { return nil, "", err } + return db, dbPath, err } diff --git a/tests/integration/events/simple/with_create_test.go b/tests/integration/events/simple/with_create_test.go index ec5c174106..c3f88eea58 100644 --- a/tests/integration/events/simple/with_create_test.go +++ b/tests/integration/events/simple/with_create_test.go @@ -28,7 +28,7 @@ func TestEventsSimpleWithCreate(t *testing.T) { "name": "John" }`, ), - colDefMap["Users"].Schema, + colDefMap["Users"], ) assert.Nil(t, err) docID1 := doc1.ID().String() @@ -39,7 +39,7 @@ func TestEventsSimpleWithCreate(t *testing.T) { "name": "Shahzad" }`, ), - colDefMap["Users"].Schema, + colDefMap["Users"], ) assert.Nil(t, err) docID2 := doc2.ID().String() diff --git a/tests/integration/events/simple/with_create_txn_test.go b/tests/integration/events/simple/with_create_txn_test.go index c890792157..7d7238b546 100644 --- a/tests/integration/events/simple/with_create_txn_test.go +++ b/tests/integration/events/simple/with_create_txn_test.go @@ -18,6 +18,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/db" testUtils "github.com/sourcenetwork/defradb/tests/integration/events" ) @@ -40,7 +41,9 @@ func TestEventsSimpleWithCreateWithTxnDiscarded(t *testing.T) { func(ctx context.Context, d client.DB) { txn, err := d.NewTxn(ctx, false) assert.Nil(t, err) - r := d.WithTxn(txn).ExecRequest( + + ctx = db.SetContextTxn(ctx, txn) + r := d.ExecRequest( ctx, `mutation { create_Users(input: {name: "Shahzad"}) { diff --git a/tests/integration/events/simple/with_delete_test.go b/tests/integration/events/simple/with_delete_test.go index b02b2505e1..141965966f 100644 --- a/tests/integration/events/simple/with_delete_test.go +++ b/tests/integration/events/simple/with_delete_test.go @@ -28,7 +28,7 @@ func TestEventsSimpleWithDelete(t *testing.T) { "name": "John" }`, ), - colDefMap["Users"].Schema, + colDefMap["Users"], ) assert.Nil(t, err) docID1 := doc1.ID().String() diff --git a/tests/integration/events/simple/with_update_test.go b/tests/integration/events/simple/with_update_test.go index 2f0960b977..d224690827 100644 --- a/tests/integration/events/simple/with_update_test.go +++ b/tests/integration/events/simple/with_update_test.go @@ -28,7 +28,7 @@ func TestEventsSimpleWithUpdate(t *testing.T) { "name": "John" }`, ), - colDefMap["Users"].Schema, + colDefMap["Users"], ) assert.Nil(t, err) docID1 := doc1.ID().String() @@ -39,7 +39,7 @@ func TestEventsSimpleWithUpdate(t *testing.T) { "name": "Shahzad" }`, ), - colDefMap["Users"].Schema, + colDefMap["Users"], ) assert.Nil(t, err) docID2 := doc2.ID().String() @@ -66,14 +66,14 @@ func TestEventsSimpleWithUpdate(t *testing.T) { ExpectedUpdates: []testUtils.ExpectedUpdate{ { DocID: immutable.Some(docID1), - Cid: immutable.Some("bafybeidzstxabh7qktq7pkmmxvpjbnwklxz3h5l6d425ldvjy65xvvuxu4"), + Cid: immutable.Some("bafybeif757a4mdwimqwl24ujjnao6xlajiajz2hwuleopnptusuttri6zu"), }, { DocID: immutable.Some(docID2), }, { DocID: immutable.Some(docID1), - Cid: immutable.Some("bafybeiah75qvtqxflw3urgejxetaugpcddx5h2ocj7pid34zjyy7tpp6wi"), + Cid: immutable.Some("bafybeifhmjw6ay5rvwznqh37ogcw5hrmqtxrnredoh6psn7lhgtdc253km"), }, }, } diff --git a/tests/integration/events/utils.go b/tests/integration/events/utils.go index d2bf418294..8b998d0051 100644 --- a/tests/integration/events/utils.go +++ b/tests/integration/events/utils.go @@ -149,7 +149,7 @@ func setupDatabase( require.NoError(t, err) for _, docStr := range docs { - doc, err := 
client.NewDocFromJSON([]byte(docStr), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(docStr), col.Definition()) require.NoError(t, err) err = col.Save(ctx, doc) diff --git a/tests/integration/explain.go b/tests/integration/explain.go index 0b9c4c2dab..da2adb69e5 100644 --- a/tests/integration/explain.go +++ b/tests/integration/explain.go @@ -15,12 +15,12 @@ import ( "sort" "testing" + "github.com/sourcenetwork/corelog" "github.com/sourcenetwork/immutable" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/logging" ) var ( @@ -78,6 +78,9 @@ type ExplainRequest struct { // NodeID is the node ID (index) of the node in which to explain. NodeID immutable.Option[int] + // The identity of this request. + Identity string + // Has to be a valid explain request type (one of: 'simple', 'debug', 'execute', 'predict'). Request string @@ -127,7 +130,10 @@ func executeExplainRequest( } for _, node := range getNodes(action.NodeID, s.nodes) { - result := node.ExecRequest(s.ctx, action.Request) + result := node.ExecRequest( + s.ctx, + action.Request, + ) assertExplainRequestResults(s, &result.GQL, action) } } @@ -151,7 +157,7 @@ func assertExplainRequestResults( // Note: if returned gql result is `nil` this panics (the panic seems useful while testing). resultantData := actualResult.Data.([]map[string]any) - log.Info(s.ctx, "", logging.NewKV("FullExplainGraphResult", actualResult.Data)) + log.InfoContext(s.ctx, "", corelog.Any("FullExplainGraphResult", actualResult.Data)) // Check if the expected full explain graph (if provided) matches the actual full explain graph // that is returned, if doesn't match we would like to still see a diff comparison (handy while debugging). 
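The harness changes above track two API migrations in this release: requests now pick up their transaction from the context instead of via `WithTxn`, and logging moved from the internal `logging` package to `corelog`. A minimal sketch of the new transaction flow, assuming a `client.DB` value `d` (setup omitted); `Discard` is the rollback path exercised by the discarded-txn test above:

    package example

    import (
    	"context"

    	"github.com/sourcenetwork/defradb/client"
    	"github.com/sourcenetwork/defradb/db"
    )

    // createThenDiscard runs a mutation inside an explicit transaction and then
    // discards it, so the created document is rolled back rather than persisted.
    func createThenDiscard(ctx context.Context, d client.DB) error {
    	txn, err := d.NewTxn(ctx, false) // false appears to request a writable txn
    	if err != nil {
    		return err
    	}
    	defer txn.Discard(ctx) // discard instead of commit: the write is dropped

    	// The transaction now travels on the context; ExecRequest picks it up.
    	ctx = db.SetContextTxn(ctx, txn)
    	result := d.ExecRequest(ctx, `mutation { create_Users(input: {name: "Shahzad"}) { name } }`)
    	_ = result
    	return nil
    }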
diff --git a/tests/integration/explain/default/type_join_many_test.go b/tests/integration/explain/default/type_join_many_test.go index 3b700b132b..031b509950 100644 --- a/tests/integration/explain/default/type_join_many_test.go +++ b/tests/integration/explain/default/type_join_many_test.go @@ -13,6 +13,8 @@ package test_explain_default import ( "testing" + "github.com/sourcenetwork/immutable" + testUtils "github.com/sourcenetwork/defradb/tests/integration" explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) @@ -53,7 +55,7 @@ func TestDefaultExplainRequestWithAOneToManyJoin(t *testing.T) { IncludeChildNodes: false, ExpectedAttributes: dataMap{ "joinType": "typeJoinMany", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "articles", }, }, diff --git a/tests/integration/explain/default/type_join_one_test.go b/tests/integration/explain/default/type_join_one_test.go index 8a7fac0925..3059bf8528 100644 --- a/tests/integration/explain/default/type_join_one_test.go +++ b/tests/integration/explain/default/type_join_one_test.go @@ -13,6 +13,8 @@ package test_explain_default import ( "testing" + "github.com/sourcenetwork/immutable" + testUtils "github.com/sourcenetwork/defradb/tests/integration" explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) @@ -54,7 +56,7 @@ func TestDefaultExplainRequestWithAOneToOneJoin(t *testing.T) { ExpectedAttributes: dataMap{ "direction": "primary", "joinType": "typeJoinOne", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "contact", }, }, @@ -163,7 +165,7 @@ func TestDefaultExplainRequestWithTwoLevelDeepNestedJoins(t *testing.T) { ExpectedAttributes: dataMap{ "direction": "primary", "joinType": "typeJoinOne", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "contact", }, }, @@ -196,7 +198,7 @@ func TestDefaultExplainRequestWithTwoLevelDeepNestedJoins(t *testing.T) { ExpectedAttributes: dataMap{ "direction": "primary", "joinType": "typeJoinOne", - "rootName": "contact", + "rootName": immutable.Some("contact"), "subTypeName": "address", }, }, diff --git a/tests/integration/explain/default/type_join_test.go b/tests/integration/explain/default/type_join_test.go index fd1676aed9..c09c1b0f12 100644 --- a/tests/integration/explain/default/type_join_test.go +++ b/tests/integration/explain/default/type_join_test.go @@ -13,6 +13,8 @@ package test_explain_default import ( "testing" + "github.com/sourcenetwork/immutable" + testUtils "github.com/sourcenetwork/defradb/tests/integration" explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) @@ -86,7 +88,7 @@ func TestDefaultExplainRequestWith2SingleJoinsAnd1ManyJoin(t *testing.T) { ExpectedAttributes: dataMap{ "direction": "primary", "joinType": "typeJoinOne", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "contact", }, }, @@ -144,7 +146,7 @@ func TestDefaultExplainRequestWith2SingleJoinsAnd1ManyJoin(t *testing.T) { IncludeChildNodes: false, ExpectedAttributes: dataMap{ "joinType": "typeJoinMany", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "articles", }, }, @@ -204,7 +206,7 @@ func TestDefaultExplainRequestWith2SingleJoinsAnd1ManyJoin(t *testing.T) { ExpectedAttributes: dataMap{ "direction": "primary", "joinType": "typeJoinOne", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "contact", }, }, diff --git a/tests/integration/explain/default/with_average_join_test.go 
b/tests/integration/explain/default/with_average_join_test.go index 265ca932ce..d1cd68046e 100644 --- a/tests/integration/explain/default/with_average_join_test.go +++ b/tests/integration/explain/default/with_average_join_test.go @@ -13,6 +13,8 @@ package test_explain_default import ( "testing" + "github.com/sourcenetwork/immutable" + testUtils "github.com/sourcenetwork/defradb/tests/integration" explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) @@ -96,7 +98,7 @@ func TestDefaultExplainRequestWithAverageOnJoinedField(t *testing.T) { IncludeChildNodes: false, ExpectedAttributes: dataMap{ "joinType": "typeJoinMany", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "books", }, }, @@ -253,7 +255,7 @@ func TestDefaultExplainRequestWithAverageOnMultipleJoinedFieldsWithFilter(t *tes IncludeChildNodes: false, ExpectedAttributes: dataMap{ "joinType": "typeJoinMany", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "books", }, }, @@ -299,7 +301,7 @@ func TestDefaultExplainRequestWithAverageOnMultipleJoinedFieldsWithFilter(t *tes IncludeChildNodes: false, ExpectedAttributes: dataMap{ "joinType": "typeJoinMany", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "articles", }, }, diff --git a/tests/integration/explain/default/with_count_join_test.go b/tests/integration/explain/default/with_count_join_test.go index 3f7802820d..4833354bba 100644 --- a/tests/integration/explain/default/with_count_join_test.go +++ b/tests/integration/explain/default/with_count_join_test.go @@ -13,6 +13,8 @@ package test_explain_default import ( "testing" + "github.com/sourcenetwork/immutable" + testUtils "github.com/sourcenetwork/defradb/tests/integration" explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) @@ -66,7 +68,7 @@ func TestDefaultExplainRequestWithCountOnOneToManyJoinedField(t *testing.T) { IncludeChildNodes: false, ExpectedAttributes: dataMap{ "joinType": "typeJoinMany", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "books", }, }, @@ -175,7 +177,7 @@ func TestDefaultExplainRequestWithCountOnOneToManyJoinedFieldWithManySources(t * IncludeChildNodes: false, ExpectedAttributes: dataMap{ "joinType": "typeJoinMany", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "books", }, }, @@ -217,7 +219,7 @@ func TestDefaultExplainRequestWithCountOnOneToManyJoinedFieldWithManySources(t * IncludeChildNodes: false, ExpectedAttributes: dataMap{ "joinType": "typeJoinMany", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "articles", }, }, diff --git a/tests/integration/explain/default/with_sum_join_test.go b/tests/integration/explain/default/with_sum_join_test.go index 5117031959..0889b3bd85 100644 --- a/tests/integration/explain/default/with_sum_join_test.go +++ b/tests/integration/explain/default/with_sum_join_test.go @@ -13,6 +13,8 @@ package test_explain_default import ( "testing" + "github.com/sourcenetwork/immutable" + testUtils "github.com/sourcenetwork/defradb/tests/integration" explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) @@ -70,7 +72,7 @@ func TestDefaultExplainRequestWithSumOnOneToManyJoinedField(t *testing.T) { IncludeChildNodes: false, ExpectedAttributes: dataMap{ "joinType": "typeJoinMany", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "books", }, }, @@ -165,7 +167,7 @@ func 
TestDefaultExplainRequestWithSumOnOneToManyJoinedFieldWithFilter(t *testing IncludeChildNodes: false, ExpectedAttributes: dataMap{ "joinType": "typeJoinMany", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "articles", }, }, @@ -280,7 +282,7 @@ func TestDefaultExplainRequestWithSumOnOneToManyJoinedFieldWithManySources(t *te IncludeChildNodes: false, ExpectedAttributes: dataMap{ "joinType": "typeJoinMany", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "books", }, }, @@ -322,7 +324,7 @@ func TestDefaultExplainRequestWithSumOnOneToManyJoinedFieldWithManySources(t *te IncludeChildNodes: false, ExpectedAttributes: dataMap{ "joinType": "typeJoinMany", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "articles", }, }, diff --git a/tests/integration/explain/fixture.go b/tests/integration/explain/fixture.go index c531d95a84..83db5ff926 100644 --- a/tests/integration/explain/fixture.go +++ b/tests/integration/explain/fixture.go @@ -38,14 +38,14 @@ var SchemaForExplainTests = testUtils.SchemaUpdate{ verified: Boolean books: [Book] articles: [Article] - contact: AuthorContact + contact: AuthorContact @primary } type AuthorContact { cell: String email: String author: Author - address: ContactAddress + address: ContactAddress @primary } type ContactAddress { diff --git a/tests/integration/index/query_with_compound_filter_relation_test.go b/tests/integration/index/query_with_compound_filter_relation_test.go new file mode 100644 index 0000000000..ff503d6d38 --- /dev/null +++ b/tests/integration/index/query_with_compound_filter_relation_test.go @@ -0,0 +1,362 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
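The `rootName` attribute in the explain assertions above is now wrapped in an option type, reflecting that the planner may omit it. For reference, a small sketch of how `sourcenetwork/immutable` options are typically used in this codebase — the `HasValue`/`Value` accessors are the commonly used ones, shown here illustratively:

    package main

    import (
    	"fmt"

    	"github.com/sourcenetwork/immutable"
    )

    func main() {
    	rootName := immutable.Some("author") // an option holding a value
    	empty := immutable.None[string]()    // an option holding nothing

    	fmt.Println(rootName.HasValue(), rootName.Value()) // true author
    	fmt.Println(empty.HasValue())                      // false
    }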
+ +package index + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestIndex_QueryWithIndexOnOneToManyRelationAndFilter_NoData(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Program { + name: String + certificationBodyOrg: Organization + } + + type Organization { + name: String @index + programs: [Program] + }`, + }, + testUtils.Request{ + Request: `query { + Program( + filter: { + _and: [ + { certificationBodyOrg: { name: { _eq: "Test" } } } + ] + } + ) { + name + } + }`, + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestIndex_QueryWithIndexOnOneToManyRelationOrFilter_NoData(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Program { + name: String + certificationBodyOrg: Organization + } + + type Organization { + name: String @index + programs: [Program] + }`, + }, + testUtils.Request{ + Request: `query { + Program( + filter: { + _or: [ + { certificationBodyOrg: { name: { _eq: "Test" } } } + ] + } + ) { + name + } + }`, + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestIndex_QueryWithIndexOnOneToManyRelationNotFilter_NoData(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Program { + name: String + certificationBodyOrg: Organization + } + + type Organization { + name: String @index + programs: [Program] + }`, + }, + testUtils.Request{ + Request: `query { + Program( + filter: { + _not: { + certificationBodyOrg: { name: { _eq: "Test" } } + } + } + ) { + name + } + }`, + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestIndex_QueryWithIndexOnOneToManyRelationAndFilter_Data(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Program { + name: String + certificationBodyOrg: Organization + } + + type Organization { + name: String @index + programs: [Program] + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "Source Inc." + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "certificationBodyOrg": "bae-2b020aba-0681-5896-91d6-e3224938c32e", + "name": "DefraDB" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "certificationBodyOrg": "bae-2b020aba-0681-5896-91d6-e3224938c32e", + "name": "LensVM" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "ESA" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "certificationBodyOrg": "bae-5e7a0a2c-40a0-572c-93b6-79930cab3317", + "name": "Horizon" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Zanzi" + }`, + }, + testUtils.Request{ + Request: `query { + Program( + filter: { + _and: [ + { certificationBodyOrg: { name: { _eq: "Source Inc." } } } + ] + } + ) { + name + } + }`, + Results: []map[string]any{ + { + "name": "DefraDB", + }, + { + "name": "LensVM", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestIndex_QueryWithIndexOnOneToManyRelationOrFilter_Data(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Program { + name: String + certificationBodyOrg: Organization + } + + type Organization { + name: String @index + programs: [Program] + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "Source Inc." 
+ }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "certificationBodyOrg": "bae-2b020aba-0681-5896-91d6-e3224938c32e", + "name": "DefraDB" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "certificationBodyOrg": "bae-2b020aba-0681-5896-91d6-e3224938c32e", + "name": "LensVM" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "ESA" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "certificationBodyOrg": "bae-5e7a0a2c-40a0-572c-93b6-79930cab3317", + "name": "Horizon" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Zanzi" + }`, + }, + testUtils.Request{ + Request: `query { + Program( + filter: { + _or: [ + { certificationBodyOrg: { name: { _eq: "Source Inc." } } }, + { name: { _eq: "Zanzi" } } + ] + } + ) { + name + } + }`, + Results: []map[string]any{ + { + "name": "Zanzi", + }, + { + "name": "DefraDB", + }, + { + "name": "LensVM", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestIndex_QueryWithIndexOnOneToManyRelationNotFilter_Data(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Program { + name: String + certificationBodyOrg: Organization + } + + type Organization { + name: String @index + programs: [Program] + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "Source Inc." + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "certificationBodyOrg": "bae-2b020aba-0681-5896-91d6-e3224938c32e", + "name": "DefraDB" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "ESA" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "certificationBodyOrg": "bae-5e7a0a2c-40a0-572c-93b6-79930cab3317", + "name": "Horizon" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Zanzi" + }`, + }, + testUtils.Request{ + Request: `query { + Program( + filter: { + _not: { + certificationBodyOrg: { name: { _eq: "Source Inc." 
} }
+						}
+					}
+				) {
+					name
+				}
+			}`,
+				Results: []map[string]any{
+					{
+						"name": "Horizon",
+					},
+					{
+						"name": "Zanzi",
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/index/query_with_index_only_field_order_test.go b/tests/integration/index/query_with_index_only_field_order_test.go
index ae46213533..13a2f7cb77 100644
--- a/tests/integration/index/query_with_index_only_field_order_test.go
+++ b/tests/integration/index/query_with_index_only_field_order_test.go
@@ -108,7 +108,7 @@ func TestQueryWithIndex_IfFloatFieldInDescOrder_ShouldFetchInRevertedOrder(t *te
 			testUtils.Request{
 				Request: `
 					query {
-						User(filter: {iq: {_gt: 1}}) {
+						User(filter: {iq: {_lt: 1}}) {
 							name
 							iq
 						}
diff --git a/tests/integration/index/query_with_index_only_filter_test.go b/tests/integration/index/query_with_index_only_filter_test.go
index 0c2c337398..1baf7248ac 100644
--- a/tests/integration/index/query_with_index_only_filter_test.go
+++ b/tests/integration/index/query_with_index_only_filter_test.go
@@ -351,6 +351,59 @@ func TestQueryWithIndex_WithInFilter_ShouldFetch(t *testing.T) {
 	testUtils.ExecuteTestCase(t, test)
 }
 
+func TestQueryWithIndex_WithInFilterOnFloat_ShouldFetch(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Test index filtering with _in filter",
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type User {
+						name: String
+						rate: Float @index
+					}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Islam",
+					"rate": 20.0
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "John",
+					"rate": 20.1
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Fred",
+					"rate": 20.2
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"rate": 20.3
+				}`,
+			},
+			testUtils.Request{
+				Request: `
+					query {
+						User(filter: {rate: {_in: [20, 20.2]}}) {
+							name
+						}
+					}`,
+				Results: []map[string]any{
+					{"name": "Islam"},
+					{"name": "Fred"},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
 func TestQueryWithIndex_IfSeveralDocsWithInFilter_ShouldFetchAll(t *testing.T) {
 	req := `query {
 		User(filter: {name: {_in: ["Islam"]}}) {
diff --git a/tests/integration/index/query_with_relation_filter_test.go b/tests/integration/index/query_with_relation_filter_test.go
index e3ae71429e..8fb6500eef 100644
--- a/tests/integration/index/query_with_relation_filter_test.go
+++ b/tests/integration/index/query_with_relation_filter_test.go
@@ -53,14 +53,16 @@ func TestQueryWithIndexOnOneToManyRelation_IfFilterOnIndexedRelation_ShouldFilte
 			testUtils.Request{
 				Request: req1,
 				Results: []map[string]any{
+					{"name": "Keenan"},
 					{"name": "Islam"},
 					{"name": "Shahzad"},
-					{"name": "Keenan"},
 				},
 			},
 			testUtils.Request{
-				Request:  makeExplainQuery(req1),
-				Asserter: testUtils.NewExplainAsserter().WithFieldFetches(6).WithIndexFetches(3),
+				Request: makeExplainQuery(req1),
+				// The invertible join does not support inverting one-many relations, so the index is
+				// not used.
+				Asserter: testUtils.NewExplainAsserter().WithFieldFetches(450).WithIndexFetches(0),
 			},
 			testUtils.Request{
 				Request: req2,
@@ -69,8 +71,10 @@
 				Results: []map[string]any{
 				},
 			},
 			testUtils.Request{
-				Request:  makeExplainQuery(req2),
-				Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(1),
+				Request: makeExplainQuery(req2),
+				// The invertible join does not support inverting one-many relations, so the index is
+				// not used.
+				Asserter: testUtils.NewExplainAsserter().WithFieldFetches(450).WithIndexFetches(0),
 			},
 		},
 	}
@@ -115,14 +119,16 @@ func TestQueryWithIndexOnOneToManyRelation_IfFilterOnIndexedRelation_ShouldFilte
 			testUtils.Request{
 				Request: req1,
 				Results: []map[string]any{
+					{"name": "Keenan"},
 					{"name": "Islam"},
 					{"name": "Shahzad"},
-					{"name": "Keenan"},
 				},
 			},
 			testUtils.Request{
-				Request:  makeExplainQuery(req1),
-				Asserter: testUtils.NewExplainAsserter().WithFieldFetches(6).WithIndexFetches(3),
+				Request: makeExplainQuery(req1),
+				// The invertible join does not support inverting one-many relations, so the index is
+				// not used.
+				Asserter: testUtils.NewExplainAsserter().WithFieldFetches(450).WithIndexFetches(0),
 			},
 			testUtils.Request{
 				Request: req2,
@@ -131,8 +137,10 @@
 				Results: []map[string]any{
 				},
 			},
 			testUtils.Request{
-				Request:  makeExplainQuery(req2),
-				Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(1),
+				Request: makeExplainQuery(req2),
+				// The invertible join does not support inverting one-many relations, so the index is
+				// not used.
+				Asserter: testUtils.NewExplainAsserter().WithFieldFetches(450).WithIndexFetches(0),
 			},
 		},
 	}
@@ -167,7 +175,7 @@ func TestQueryWithIndexOnOneToOnesSecondaryRelation_IfFilterOnIndexedRelation_Sh
 				}
 
 				type Address {
-					user: User
+					user: User @primary
 					city: String @index
 				}`,
 			},
@@ -309,80 +317,235 @@ func TestQueryWithIndexOnOneToOnePrimaryRelation_IfFilterOnIndexedRelationWhileI
 	testUtils.ExecuteTestCase(t, test)
 }
 
-func TestQueryWithIndexOnOneToTwoRelation_IfFilterOnIndexedRelation_ShouldFilter(t *testing.T) {
-	req1 := `query {
-		User(filter: {
-			address: {city: {_eq: "Munich"}}
-		}) {
-			name
-			address {
-				city
-			}
-		}
-	}`
-	req2 := `query {
+func TestQueryWithIndexOnOneToMany_IfFilterOnIndexedRelation_ShouldFilter(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Filter on indexed relation field in 1-N relations",
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type User {
+						name: String
+						devices: [Device]
+					}
+
+					type Device {
+						model: String @index
+						manufacturer: String
+						owner: User
+					}
+				`,
+			},
+			testUtils.CreateDoc{
+				CollectionID: 0,
+				Doc: `{
+					"name": "Chris"
+				}`,
+			},
+			testUtils.CreateDoc{
+				CollectionID: 0,
+				Doc: `{
+					"name": "Addo"
+				}`,
+			},
+			testUtils.CreateDoc{
+				CollectionID: 1,
+				Doc: `{
+					"model": "Walkman",
+					"manufacturer": "Sony",
+					"owner": "bae-403d7337-f73e-5c81-8719-e853938c8985"
+				}`,
+			},
+			testUtils.CreateDoc{
+				CollectionID: 1,
+				Doc: `{
+					"model": "Walkman",
+					"manufacturer": "The Proclaimers",
+					"owner": "bae-403d7337-f73e-5c81-8719-e853938c8985"
+				}`,
+			},
+			testUtils.CreateDoc{
+				CollectionID: 1,
+				Doc: `{
+					"model": "Running Man",
+					"manufacturer": "Braveworld Productions",
+					"owner": "bae-403d7337-f73e-5c81-8719-e853938c8985"
+				}`,
+			},
+			testUtils.Request{
+				Request: `query {
+					User(filter: {
+						devices: {model: {_eq: "Walkman"}}
+					}) {
+						name
+						devices {
+							model
+							manufacturer
+						}
+					}
+				}`,
+				Results: []map[string]any{
+					{
+						"name": "Chris",
+						"devices": []map[string]any{
+							{
+								"model":        "Walkman",
+								"manufacturer": "Sony",
+							},
+							{
+								"model":        "Walkman",
+								"manufacturer": "The Proclaimers",
+							},
+							// The filter is on User, so all devices belonging to it will be returned
+							{
+								"model":        "Running Man",
+								"manufacturer": "Braveworld Productions",
+							},
+						},
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithIndexOnOneToMany_IfFilterOnIndexedRelation_ShouldFilterWithExplain(t *testing.T) {
+	req := `query {
+		User(filter: {
+			devices: {model: {_eq: "Walkman"}}
+		}) {
+			name
+			devices {
+				model
+				manufacturer
+			}
+		}
+	}`
 
 	test := testUtils.TestCase{
-		Description: "Filter on indexed relation field in 1-1 and 1-N relations",
+		Description: "Filter on indexed relation field in 1-N relations",
 		Actions: []any{
 			testUtils.SchemaUpdate{
 				Schema: `
 					type User {
-						name: String
-						age: Int
-						address: Address
-						devices: [Device]
+						name: String
+						devices: [Device]
 					}
 
 					type Device {
 						model: String @index
+						manufacturer: String
 						owner: User
-					}
-
-					type Address {
-						user: User
-						city: String @index
-					}`,
-			},
-			testUtils.CreatePredefinedDocs{
-				Docs: getUserDocs(),
+					}
+				`,
+			},
+			testUtils.CreateDoc{
+				CollectionID: 0,
+				Doc: `{
+					"name": "Chris"
+				}`,
+			},
+			testUtils.CreateDoc{
+				CollectionID: 1,
+				Doc: `{
+					"model": "Walkman",
+					"manufacturer": "Sony",
+					"owner": "bae-403d7337-f73e-5c81-8719-e853938c8985"
+				}`,
+			},
+			testUtils.CreateDoc{
+				CollectionID: 1,
+				Doc: `{
+					"model": "Walkman",
+					"manufacturer": "The Proclaimers",
+					"owner": "bae-403d7337-f73e-5c81-8719-e853938c8985"
+				}`,
+			},
+			testUtils.CreateDoc{
+				CollectionID: 1,
+				Doc: `{
+					"model": "Running Man",
+					"manufacturer": "Braveworld Productions",
+					"owner": "bae-403d7337-f73e-5c81-8719-e853938c8985"
+				}`,
 			},
 			testUtils.Request{
-				Request: req1,
+				Request: req,
 				Results: []map[string]any{
 					{
-						"name": "Islam",
-						"address": map[string]any{
-							"city": "Munich",
+						"name": "Chris",
+						"devices": []map[string]any{
+							{
+								"model":        "Walkman",
+								"manufacturer": "Sony",
+							},
+							{
+								"model":        "Walkman",
+								"manufacturer": "The Proclaimers",
+							},
+							{
+								"model":        "Running Man",
+								"manufacturer": "Braveworld Productions",
+							},
 						},
 					},
 				},
 			},
 			testUtils.Request{
-				Request:  makeExplainQuery(req1),
-				Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(1),
+				Request: makeExplainQuery(req),
+				// The invertible join does not support inverting one-many relations, so the index is
+				// not used.
+				Asserter: testUtils.NewExplainAsserter().WithFieldFetches(10).WithIndexFetches(0),
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithIndexOnOneToOne_IfFilterOnIndexedRelation_ShouldFilter(t *testing.T) {
+	req := `query {
+		User(filter: {
+			address: {city: {_eq: "Munich"}}
+		}) {
+			name
+			address {
+				city
+			}
+		}
+	}`
+	test := testUtils.TestCase{
+		Description: "Filter on indexed relation field in 1-1 relation",
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type User {
+						name: String
+						address: Address
+					}
+
+					type Address {
+						user: User @primary
+						city: String @index
+					}
+				`,
+			},
+			testUtils.CreatePredefinedDocs{
+				Docs: getUserDocs(),
 			},
 			testUtils.Request{
-				Request: req2,
+				Request: req,
 				Results: []map[string]any{
 					{
-						"name": "Chris",
-						"devices": map[string]any{
-							"model": "Walkman",
+						"name": "Islam",
+						"address": map[string]any{
+							"city": "Munich",
 						},
 					},
 				},
 			},
 			testUtils.Request{
-				Request: makeExplainQuery(req2),
+				Request: makeExplainQuery(req),
 				Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(1),
 			},
 		},
diff --git a/tests/integration/index/query_with_unique_index_only_filter_test.go b/tests/integration/index/query_with_unique_index_only_filter_test.go
index 08f1b1b927..23563335d4 100644
--- a/tests/integration/index/query_with_unique_index_only_filter_test.go
+++ b/tests/integration/index/query_with_unique_index_only_filter_test.go
@@ -558,6 +558,14 @@ func TestQueryWithUniqueIndex_WithEqualFilterOnNilValue_ShouldFetch(t *testing.T
 					"name": "Alice"
 				}`,
 			},
+			testUtils.CreateDoc{
+				CollectionID: 0,
+				Doc: `
+				{
+					"name": "Bob",
+					"age": 0
+				}`,
+			},
 			testUtils.Request{
 				Request: `
 					query {
@@ -575,6 +583,109 @@ func TestQueryWithUniqueIndex_WithEqualFilterOnNilValue_ShouldFetch(t *testing.T
 	testUtils.ExecuteTestCase(t, test)
 }
 
+func TestQueryWithUniqueIndex_WithEqualFilterOnZero_ShouldNotFetchNil(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Test index filtering with _eq filter on zero value",
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type User {
+						name: String
+						age: Int @index(unique: true)
+					}`,
+			},
+			testUtils.CreateDoc{
+				CollectionID: 0,
+				Doc: `
+				{
+					"name": "Alice"
+				}`,
+			},
+			testUtils.CreateDoc{
+				CollectionID: 0,
+				Doc: `
+				{
+					"name": "Bob",
+					"age": 0
+				}`,
+			},
+			testUtils.CreateDoc{
+				CollectionID: 0,
+				Doc: `
+				{
+					"name": "Kate",
+					"age": 33
+				}`,
+			},
+			testUtils.Request{
+				Request: `
+					query {
+						User(filter: {age: {_eq: 0}}) {
+							name
+						}
+					}`,
+				Results: []map[string]any{
+					{"name": "Bob"},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithUniqueIndex_WithNotEqualFilterOnNilValue_ShouldFetch(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Test index filtering with _ne filter on nil value",
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type User {
+						name: String
+						age: Int @index(unique: true)
+					}`,
+			},
+			testUtils.CreateDoc{
+				CollectionID: 0,
+				Doc: `
+				{
+					"name": "Alice"
+				}`,
+			},
+			testUtils.CreateDoc{
+				CollectionID: 0,
+				Doc: `
+				{
+					"name": "Kate",
+					"age": 0
+				}`,
+			},
+			testUtils.CreateDoc{
+				CollectionID: 0,
+				Doc: `
+				{
+					"name": "Bob",
+					"age": 23
+				}`,
+			},
+			testUtils.Request{
+				Request: `
+					query {
+						User(filter: {age: {_ne: null}}) {
+							name
+						}
+					}`,
+				Results: []map[string]any{
+					{"name": "Kate"},
+					{"name": "Bob"},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
 func TestQueryWithUniqueIndex_WithMultipleNilValuesAndEqualFilter_ShouldFetch(t *testing.T) {
 	test :=
testUtils.TestCase{ Description: "Test index filtering with _eq filter on nil value", diff --git a/tests/integration/index/update_unique_composite_test.go b/tests/integration/index/update_unique_composite_test.go new file mode 100644 index 0000000000..4621e79283 --- /dev/null +++ b/tests/integration/index/update_unique_composite_test.go @@ -0,0 +1,53 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package index + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestUniqueCompositeIndexUpdate_UponUpdatingDocWithExistingFieldValue_ShouldSucceed(t *testing.T) { + test := testUtils.TestCase{ + Description: "updating non-indexed fields on a doc with existing field combination for composite index should succeed", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User @index(unique: true, fields: ["name", "age"]) { + name: String + age: Int + email: String + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "John", + "age": 21, + "email": "email@gmail.com" + }`, + }, + testUtils.UpdateDoc{ + CollectionID: 0, + DocID: 0, + Doc: ` + { + "email": "another@gmail.com" + }`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/index/update_unique_test.go b/tests/integration/index/update_unique_test.go new file mode 100644 index 0000000000..c2743b313a --- /dev/null +++ b/tests/integration/index/update_unique_test.go @@ -0,0 +1,51 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package index + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestUniqueIndexUpdate_UponUpdatingDocNonIndexedField_ShouldSucceed(t *testing.T) { + test := testUtils.TestCase{ + Description: "updating non-indexed fields on a doc with a unique index should succeed", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String @index(unique: true) + age: Int + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "Fred", + "age": 36 + }`, + }, + testUtils.UpdateDoc{ + CollectionID: 0, + DocID: 0, + Doc: ` + { + "age": 37 + }`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/issues/2566_test.go b/tests/integration/issues/2566_test.go new file mode 100644 index 0000000000..696425a3b1 --- /dev/null +++ b/tests/integration/issues/2566_test.go @@ -0,0 +1,205 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package issues + +import ( + "fmt" + "math" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +// This test documents https://github.com/sourcenetwork/defradb/issues/2566 +func TestP2PUpdate_WithPNCounterSimultaneousOverflowIncrement_DoesNotReachConsistency(t *testing.T) { + test := testUtils.TestCase{ + SupportedClientTypes: immutable.Some( + []testUtils.ClientType{ + // This test only supports the Go client at the moment due to + // https://github.com/sourcenetwork/defradb/issues/2569 + testUtils.GoClientType, + }, + ), + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Age: Float @crdt(type: "pncounter") + } + `, + }, + testUtils.CreateDoc{ + // Create John on all nodes + Doc: fmt.Sprintf(`{ + "Name": "John", + "Age": %g + }`, math.MaxFloat64/10), + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(0), + Doc: fmt.Sprintf(`{ + "Age": %g + }`, math.MaxFloat64), + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(1), + Doc: fmt.Sprintf(`{ + "Age": %g + }`, -math.MaxFloat64), + }, + testUtils.ConnectPeers{ + // Configure the peer connection after the document has been created and updated independently + // on each node. This allows us to be sure which update was applied on each node. + // If the connection was configured before the updates there would be a race condition resulting + // in a variable resultant state. + SourceNodeID: 0, + TargetNodeID: 1, + }, + testUtils.UpdateDoc{ + // This is an arbitrary update on both nodes to force the sync of the document created + // before the peer connection was configured. + Doc: `{ + "Name": "Fred" + }`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + NodeID: immutable.Some(0), + Request: `query { + Users { + Age + } + }`, + Results: []map[string]any{ + { + // Node 0 overflows before subtraction, and because subtracting from infinity + // results in infinity, the value remains infinite + "Age": math.Inf(1), + }, + }, + }, + testUtils.Request{ + NodeID: immutable.Some(1), + Request: `query { + Users { + Age + } + }`, + Results: []map[string]any{ + { + // Node 1 subtracts before adding, meaning no overflow is achieved and the value + // remains finite + "Age": float64(1.7976931348623155e+307), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +// This test documents https://github.com/sourcenetwork/defradb/issues/2566 +func TestP2PUpdate_WithPNCounterSimultaneousOverflowDecrement_DoesNotReachConsistency(t *testing.T) { + test := testUtils.TestCase{ + SupportedClientTypes: immutable.Some( + []testUtils.ClientType{ + // This test only supports the Go client at the moment due to + // https://github.com/sourcenetwork/defradb/issues/2569 + testUtils.GoClientType, + }, + ), + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Age: Float @crdt(type: "pncounter") + } + `, + }, + testUtils.CreateDoc{ + // Create John on all nodes + Doc: fmt.Sprintf(`{ + "Name": "John", + "Age": %g + }`, -math.MaxFloat64/10), + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(1), + Doc: fmt.Sprintf(`{ + "Age": %g + }`, math.MaxFloat64), + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(0), + Doc: fmt.Sprintf(`{ + "Age": %g + }`, -math.MaxFloat64), + }, + testUtils.ConnectPeers{ + // Configure the peer connection after the document has been created and
updated independently + // on each node. This allows us to be sure which update was applied on each node. + // If the connection was configured before the updates there would be a race condition resulting + // in a variable resultant state. + SourceNodeID: 0, + TargetNodeID: 1, + }, + testUtils.UpdateDoc{ + // This is an arbitrary update on both nodes to force the sync of the document created + // before the peer connection was configured. + Doc: `{ + "Name": "Fred" + }`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + NodeID: immutable.Some(0), + Request: `query { + Users { + Age + } + }`, + Results: []map[string]any{ + { + // Node 0 overflows before addition, and because adding to infinity + // results in infinity, the value remains infinite + "Age": math.Inf(-1), + }, + }, + }, + testUtils.Request{ + NodeID: immutable.Some(1), + Request: `query { + Users { + Age + } + }`, + Results: []map[string]any{ + { + // Node 1 adds before subtracting, meaning no overflow is achieved and the value + // remains finite + "Age": float64(-1.7976931348623155e+307), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/issues/2569_test.go b/tests/integration/issues/2569_test.go new file mode 100644 index 0000000000..2d942177d6 --- /dev/null +++ b/tests/integration/issues/2569_test.go @@ -0,0 +1,167 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package issues + +import ( + "fmt" + "math" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +// These tests document https://github.com/sourcenetwork/defradb/issues/2569 + +func TestP2PUpdate_WithPNCounterFloatOverflowIncrement_PreventsQuerying(t *testing.T) { + test := testUtils.TestCase{ + SupportedClientTypes: immutable.Some( + []testUtils.ClientType{ + // This issue only affects the http and the cli clients + testUtils.HTTPClientType, + testUtils.CLIClientType, + }, + ), + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Float @crdt(type: "pncounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: fmt.Sprintf(`{ + "name": "John", + "points": %g + }`, math.MaxFloat64), + }, + testUtils.UpdateDoc{ + // Overflow the points field, this results in a value of `math.Inf(1)` + Doc: fmt.Sprintf(`{ + "points": %g + }`, math.MaxFloat64/10), + }, + testUtils.Request{ + Request: `query { + Users { + name + points + } + }`, + ExpectedError: "unexpected end of JSON input", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestP2PUpdate_WithPNCounterFloatOverflowDecrement_PreventsQuerying(t *testing.T) { + test := testUtils.TestCase{ + SupportedClientTypes: immutable.Some( + []testUtils.ClientType{ + // This issue only affects the http and the cli clients + testUtils.HTTPClientType, + testUtils.CLIClientType, + }, + ), + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Float @crdt(type: "pncounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: fmt.Sprintf(`{ + "name": "John", + "points": %g + }`, -math.MaxFloat64), + }, + testUtils.UpdateDoc{ + // Overflow the points field, this results in a value of
`math.Inf(-1)` + Doc: fmt.Sprintf(`{ + "points": %g + }`, -math.MaxFloat64/10), + }, + testUtils.Request{ + Request: `query { + Users { + name + points + } + }`, + ExpectedError: "unexpected end of JSON input", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestP2PUpdate_WithPNCounterFloatOverflow_PreventsCollectionGet(t *testing.T) { + test := testUtils.TestCase{ + SupportedClientTypes: immutable.Some( + []testUtils.ClientType{ + // This issue only affects the http and the cli clients + testUtils.HTTPClientType, + testUtils.CLIClientType, + }, + ), + SupportedMutationTypes: immutable.Some( + []testUtils.MutationType{ + // We limit the test to Collection mutation calls, as the test framework + // will make a `Get` call before submitting the document, which is where the error + // will surface (not the update itself) + testUtils.CollectionSaveMutationType, + testUtils.CollectionNamedMutationType, + }, + ), + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Float @crdt(type: "pncounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: fmt.Sprintf(`{ + "name": "John", + "points": %g + }`, math.MaxFloat64), + }, + testUtils.UpdateDoc{ + // Overflow the points field, this results in a value of `math.Inf(1)` + Doc: fmt.Sprintf(`{ + "points": %g + }`, math.MaxFloat64/10), + }, + testUtils.UpdateDoc{ + // Try and update the document again, the value used does not matter. + Doc: `{ + "points": 1 + }`, + // WARNING: This error is just an artifact of our test harness, what actually happens + // is the test harness calls `collection.Get`, which returns an empty string and no error. + ExpectedError: "cannot parse JSON: cannot parse empty string", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/issues/README.md b/tests/integration/issues/README.md new file mode 100644 index 0000000000..c9405853bc --- /dev/null +++ b/tests/integration/issues/README.md @@ -0,0 +1,5 @@ +# Issues + +This directory hosts tests documenting known issues. Test files are named after their corresponding GitHub issue ("\[IssueNumber\]_test.go"). + +Ideally the only file in this directory would be this readme.
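The divergence asserted by the issue-2566 tests above follows directly from IEEE 754 float64 arithmetic: addition saturates at infinity, and a saturated value never returns to a finite one, so the same pair of counter deltas applied in different orders can settle on different results. A minimal standalone Go sketch of that behaviour (an illustration only, not part of this patch; the variable names are invented for the example):

```go
package main

import (
	"fmt"
	"math"
)

func main() {
	start := math.MaxFloat64 / 10

	// Node 0 applies the increment first: the sum exceeds MaxFloat64 and
	// saturates to +Inf, and +Inf minus any finite value is still +Inf.
	node0 := start + math.MaxFloat64
	node0 -= math.MaxFloat64

	// Node 1 applies the decrement first: every intermediate value stays
	// finite, so the final value is finite (~1.7976931348623155e+307).
	node1 := start - math.MaxFloat64
	node1 += math.MaxFloat64

	fmt.Println(node0) // +Inf
	fmt.Println(node1) // 1.7976931348623155e+307
}
```

The two printed values correspond to the per-node results asserted in the tests above.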
diff --git a/tests/integration/lens.go b/tests/integration/lens.go index 69c49a1cbc..541b708a33 100644 --- a/tests/integration/lens.go +++ b/tests/integration/lens.go @@ -14,6 +14,7 @@ import ( "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/db" ) // ConfigureMigration is a test action which will configure a Lens migration using the @@ -42,9 +43,10 @@ func configureMigration( action ConfigureMigration, ) { for _, node := range getNodes(action.NodeID, s.nodes) { - db := getStore(s, node, action.TransactionID, action.ExpectedError) + txn := getTransaction(s, node, action.TransactionID, action.ExpectedError) + ctx := db.SetContextTxn(s.ctx, txn) - err := db.SetMigration(s.ctx, action.LensConfig) + err := node.SetMigration(ctx, action.LensConfig) expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError) assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised) diff --git a/tests/integration/mutation/create/crdt/pcounter_test.go b/tests/integration/mutation/create/crdt/pcounter_test.go new file mode 100644 index 0000000000..681ca2ec76 --- /dev/null +++ b/tests/integration/mutation/create/crdt/pcounter_test.go @@ -0,0 +1,57 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package create + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestPCounterCreate_IntKindWithPositiveValue_NoError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Document creation with P Counter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Int @crdt(type: "pcounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "points": 10 + }`, + }, + testUtils.Request{ + Request: `query { + Users { + _docID + name + points + } + }`, + Results: []map[string]any{ + { + "_docID": "bae-a688789e-d8a6-57a7-be09-22e005ab79e0", + "name": "John", + "points": int64(10), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go b/tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go index 18d4a2e13c..16da55ce78 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go @@ -82,7 +82,7 @@ func TestMutationCreateOneToOne_UseAliasWithNonExistingRelationSecondarySide_Err "name": "Painted House", "author": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" }`, - ExpectedError: "no document for the given ID exists", + ExpectedError: "document not found or not authorized to access", }, }, } diff --git a/tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go b/tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go index 30545d6e7c..c693b05187 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go @@ -82,7 +82,7 @@ func 
TestMutationCreateOneToOne_NonExistingRelationSecondarySide_Error(t *testin "name": "Painted House", "author_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" }`, - ExpectedError: "no document for the given ID exists", + ExpectedError: "document not found or not authorized to access", }, }, } diff --git a/tests/integration/mutation/create/field_kinds/one_to_one_to_one/utils.go b/tests/integration/mutation/create/field_kinds/one_to_one_to_one/utils.go index 9fce31fdb2..896fb1c5eb 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_one_to_one/utils.go +++ b/tests/integration/mutation/create/field_kinds/one_to_one_to_one/utils.go @@ -29,7 +29,7 @@ func execute(t *testing.T, test testUtils.TestCase) { name: String rating: Float author: Author - publisher: Publisher + publisher: Publisher @primary } type Author { diff --git a/tests/integration/mutation/create/with_version_test.go b/tests/integration/mutation/create/with_version_test.go index 958dc113f1..943916f1ed 100644 --- a/tests/integration/mutation/create/with_version_test.go +++ b/tests/integration/mutation/create/with_version_test.go @@ -39,7 +39,7 @@ func TestMutationCreate_ReturnsVersionCID(t *testing.T) { { "_version": []map[string]any{ { - "cid": "bafybeidzstxabh7qktq7pkmmxvpjbnwklxz3h5l6d425ldvjy65xvvuxu4", + "cid": "bafybeif757a4mdwimqwl24ujjnao6xlajiajz2hwuleopnptusuttri6zu", }, }, }, diff --git a/tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go b/tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go index bee050d1ae..260a9a7b70 100644 --- a/tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go +++ b/tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go @@ -41,7 +41,7 @@ func TestDeletionOfADocumentUsingSingleDocIDWithShowDeletedDocumentQuery(t *test "name": "John", "age": 30 }` - doc1, err := client.NewDocFromJSON([]byte(jsonString1), colDefMap["Author"].Schema) + doc1, err := client.NewDocFromJSON([]byte(jsonString1), colDefMap["Author"]) require.NoError(t, err) jsonString2 := fmt.Sprintf(`{ @@ -49,7 +49,7 @@ func TestDeletionOfADocumentUsingSingleDocIDWithShowDeletedDocumentQuery(t *test "rating": 9.9, "author_id": "%s" }`, doc1.ID()) - doc2, err := client.NewDocFromJSON([]byte(jsonString2), colDefMap["Book"].Schema) + doc2, err := client.NewDocFromJSON([]byte(jsonString2), colDefMap["Book"]) require.NoError(t, err) jsonString3 := fmt.Sprintf(`{ diff --git a/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/utils.go b/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/utils.go index 89f0e497f4..131a7194fe 100644 --- a/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/utils.go +++ b/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/utils.go @@ -29,7 +29,7 @@ func execute(t *testing.T, test testUtils.TestCase) { name: String rating: Float author: Author - publisher: Publisher + publisher: Publisher @primary } type Author { diff --git a/tests/integration/mutation/update/crdt/pcounter_test.go b/tests/integration/mutation/update/crdt/pcounter_test.go new file mode 100644 index 0000000000..c4ff85e8b4 --- /dev/null +++ b/tests/integration/mutation/update/crdt/pcounter_test.go @@ -0,0 +1,265 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package update + +import ( + "fmt" + "math" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestPCounterUpdate_IntKindWithNegativeIncrement_ShouldError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Negative increments of a P Counter with Int type", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Int @crdt(type: "pcounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "points": 0 + }`, + }, + testUtils.UpdateDoc{ + DocID: 0, + Doc: `{ + "points": -10 + }`, + ExpectedError: "value cannot be negative", + }, + testUtils.Request{ + Request: `query { + Users { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "John", + "points": int64(0), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestPCounterUpdate_IntKindWithPositiveIncrement_ShouldIncrement(t *testing.T) { + test := testUtils.TestCase{ + Description: "Positive increments of a P Counter with Int type", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Int @crdt(type: "pcounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "points": 0 + }`, + }, + testUtils.UpdateDoc{ + DocID: 0, + Doc: `{ + "points": 10 + }`, + }, + testUtils.UpdateDoc{ + DocID: 0, + Doc: `{ + "points": 10 + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "John", + "points": int64(20), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +// This test documents what happens when an overflow occurs in a P Counter with Int type. +func TestPCounterUpdate_IntKindWithPositiveIncrementOverflow_RollsOverToMinInt64(t *testing.T) { + test := testUtils.TestCase{ + Description: "Positive increments of a P Counter with Int type causing overflow behaviour", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // GQL mutation will return a type error in this case + // because we are testing the internal overflow behaviour with + // an int64 but the GQL Int type is an int32. + testUtils.CollectionNamedMutationType, + testUtils.CollectionSaveMutationType, + }), + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Int @crdt(type: "pcounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: fmt.Sprintf(`{ + "name": "John", + "points": %d + }`, math.MaxInt64), + }, + testUtils.UpdateDoc{ + DocID: 0, + Doc: `{ + "points": 1 + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "John", + "points": int64(math.MinInt64), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestPCounterUpdate_FloatKindWithPositiveIncrement_ShouldIncrement(t *testing.T) { + test := testUtils.TestCase{ + Description: "Positive increments of a P Counter with Float type.
Note the lack of precision", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Float @crdt(type: "pcounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "points": 0 + }`, + }, + testUtils.UpdateDoc{ + DocID: 0, + Doc: `{ + "points": 10.1 + }`, + }, + testUtils.UpdateDoc{ + DocID: 0, + Doc: `{ + "points": 10.2 + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "John", + // Note the lack of precision of float types. + "points": 20.299999999999997, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +// This test documents what happens when an overflow occurs in a P Counter with Float type. +// In this case it is the same as a no-op. +func TestPCounterUpdate_FloatKindWithPositiveIncrementOverflow_NoOp(t *testing.T) { + test := testUtils.TestCase{ + Description: "Positive increments of a P Counter with Float type and overflow causing a no-op", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Float @crdt(type: "pcounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: fmt.Sprintf(`{ + "name": "John", + "points": %g + }`, math.MaxFloat64), + }, + testUtils.UpdateDoc{ + DocID: 0, + Doc: `{ + "points": 1000 + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "John", + "points": math.MaxFloat64, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/update/crdt/pncounter_test.go b/tests/integration/mutation/update/crdt/pncounter_test.go index f8ede1cffc..534bd406dc 100644 --- a/tests/integration/mutation/update/crdt/pncounter_test.go +++ b/tests/integration/mutation/update/crdt/pncounter_test.go @@ -75,8 +75,9 @@ func TestPNCounterUpdate_IntKindWithPositiveIncrementOverflow_RollsOverToMinInt6 test := testUtils.TestCase{ Description: "Positive increments of a PN Counter with Int type causing overflow behaviour", SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ - // GQL mutation will return an error - // when integer type overflows + // GQL mutation will return a type error in this case + // because we are testing the internal overflow behaviour with + // an int64 but the GQL Int type is an int32. testUtils.CollectionNamedMutationType, testUtils.CollectionSaveMutationType, }), @@ -173,10 +174,16 @@ func TestPNCounterUpdate_FloatKindWithPositiveIncrement_ShouldIncrement(t *testi } // This test documents what happens when an overflow occurs in a PN Counter with Float type. -// In this case it is the same as a no-op. -func TestPNCounterUpdate_FloatKindWithPositiveIncrementOverflow_NoOp(t *testing.T) { +func TestPNCounterUpdate_FloatKindWithPositiveIncrementOverflow_PositiveInf(t *testing.T) { test := testUtils.TestCase{ - Description: "Positive increments of a PN Counter with Float type and overflow causing a no-op", + Description: "Positive increments of a PN Counter with Float type and overflow", + SupportedClientTypes: immutable.Some( + []testUtils.ClientType{ + // This test only supports the Go client at the moment due to + // https://github.com/sourcenetwork/defradb/issues/2569 + testUtils.GoClientType, + }, + ), Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -194,8 +201,105 @@ func TestPNCounterUpdate_FloatKindWithPositiveIncrementOverflow_NoOp(t *testing.
}, testUtils.UpdateDoc{ DocID: 0, + Doc: fmt.Sprintf(`{ + "points": %g + }`, math.MaxFloat64/10), + }, + testUtils.Request{ + Request: `query { + Users { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "John", + "points": math.Inf(1), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +// This test documents what happens when an overflow occurs in a PN Counter with Float type. +func TestPNCounterUpdate_FloatKindWithDecrementOverflow_NegativeInf(t *testing.T) { + test := testUtils.TestCase{ + Description: "Decrements of a PN Counter with Float type and overflow", + SupportedClientTypes: immutable.Some( + []testUtils.ClientType{ + // This test only supports the Go client at the moment due to + // https://github.com/sourcenetwork/defradb/issues/2569 + testUtils.GoClientType, + }, + ), + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Float @crdt(type: "pncounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: fmt.Sprintf(`{ + "name": "John", + "points": %g + }`, -math.MaxFloat64), + }, + testUtils.UpdateDoc{ + DocID: 0, + Doc: fmt.Sprintf(`{ + "points": %g + }`, -math.MaxFloat64/10), + }, + testUtils.Request{ + Request: `query { + Users { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "John", + "points": math.Inf(-1), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestPNCounterUpdate_FloatKindWithPositiveIncrementInsignificantValue_DoesNothing(t *testing.T) { + test := testUtils.TestCase{ + Description: "Positive increments of a PN Counter with Float type and an insignificant value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Float @crdt(type: "pncounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: fmt.Sprintf(`{ + "name": "John", + "points": %g + }`, math.MaxFloat64/10), + }, + testUtils.UpdateDoc{ + // `1` is insignificant to a large float64 and adding it to the large value + // should not result in a value change Doc: `{ - "points": 1000 + "points": 1 }`, }, testUtils.Request{ @@ -208,7 +312,7 @@ func TestPNCounterUpdate_FloatKindWithPositiveIncrementOverflow_NoOp(t *testing. Results: []map[string]any{ { "name": "John", - "points": math.MaxFloat64, + "points": math.MaxFloat64 / 10, }, }, }, diff --git a/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go b/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go index 751ca67b78..d3df327de2 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go @@ -65,7 +65,7 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_Collectio }`, bookID, ), - ExpectedError: "The given field or alias to field does not exist. Name: published", + ExpectedError: "The given field does not exist. Name: published", }, }, } @@ -118,7 +118,7 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_GQL(t *te }`, bookID, ), - ExpectedError: "The given field or alias to field does not exist.
Name: published", }, }, } diff --git a/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go b/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go index fdb8928964..39e132a6c6 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go @@ -184,7 +184,7 @@ func TestMutationUpdateOneToOne_InvalidAliasRelationNameToLinkFromSecondarySide_ }`, invalidAuthorID, ), - ExpectedError: "no document for the given ID exists", + ExpectedError: "document not found or not authorized to access", }, }, } diff --git a/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go b/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go index 6d38a9914d..0c05734204 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go @@ -368,7 +368,7 @@ func TestMutationUpdateOneToOne_InvalidRelationIDToLinkFromSecondarySide_Error(t }`, invalidAuthorID, ), - ExpectedError: "no document for the given ID exists", + ExpectedError: "document not found or not authorized to access", }, }, } diff --git a/tests/integration/net/order/tcp_test.go b/tests/integration/net/order/tcp_test.go index f80701c64c..ef18668d20 100644 --- a/tests/integration/net/order/tcp_test.go +++ b/tests/integration/net/order/tcp_test.go @@ -17,15 +17,15 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/net" - testutils "github.com/sourcenetwork/defradb/tests/integration" + testUtils "github.com/sourcenetwork/defradb/tests/integration" ) // TestP2PWithSingleDocumentUpdatePerNode tests document syncing between two nodes with a single update per node func TestP2PWithSingleDocumentUpdatePerNode(t *testing.T) { test := P2PTestCase{ NodeConfig: [][]net.NodeOpt{ - testutils.RandomNetworkingConfig()(), - testutils.RandomNetworkingConfig()(), + testUtils.RandomNetworkingConfig()(), + testUtils.RandomNetworkingConfig()(), }, NodePeers: map[int][]int{ 1: { @@ -75,8 +75,8 @@ func TestP2PWithSingleDocumentUpdatePerNode(t *testing.T) { func TestP2PWithMultipleDocumentUpdatesPerNode(t *testing.T) { test := P2PTestCase{ NodeConfig: [][]net.NodeOpt{ - testutils.RandomNetworkingConfig()(), - testutils.RandomNetworkingConfig()(), + testUtils.RandomNetworkingConfig()(), + testUtils.RandomNetworkingConfig()(), }, NodePeers: map[int][]int{ 1: { @@ -136,18 +136,18 @@ func TestP2PWithMultipleDocumentUpdatesPerNode(t *testing.T) { // TestP2FullPReplicator tests document syncing between a node and a replicator. 
func TestP2FullPReplicator(t *testing.T) { - colDefMap, err := testutils.ParseSDL(userCollectionGQLSchema) + colDefMap, err := testUtils.ParseSDL(userCollectionGQLSchema) require.NoError(t, err) doc, err := client.NewDocFromJSON([]byte(`{ "Name": "John", "Age": 21 - }`), colDefMap[userCollection].Schema) + }`), colDefMap[userCollection]) require.NoError(t, err) test := P2PTestCase{ NodeConfig: [][]net.NodeOpt{ - testutils.RandomNetworkingConfig()(), - testutils.RandomNetworkingConfig()(), + testUtils.RandomNetworkingConfig()(), + testUtils.RandomNetworkingConfig()(), }, NodeReplicators: map[int][]int{ 0: { diff --git a/tests/integration/net/order/utils.go b/tests/integration/net/order/utils.go index 3ba5fc7f26..2373037b62 100644 --- a/tests/integration/net/order/utils.go +++ b/tests/integration/net/order/utils.go @@ -15,20 +15,20 @@ import ( "fmt" "testing" + "github.com/sourcenetwork/corelog" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/sourcenetwork/defradb/client" coreDB "github.com/sourcenetwork/defradb/db" "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/logging" "github.com/sourcenetwork/defradb/net" netutils "github.com/sourcenetwork/defradb/net/utils" testutils "github.com/sourcenetwork/defradb/tests/integration" ) var ( - log = logging.MustNewLogger("test.net") + log = corelog.NewLogger("test.net") ) const ( @@ -47,6 +47,11 @@ const ( type P2PTestCase struct { Query string + + // The identity for all requests. + // TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2366 - Improve in ACP <> P2P implementation + Identity string + // Configuration parameters for each peer NodeConfig [][]net.NodeOpt @@ -75,7 +80,7 @@ func setupDefraNode( ) (*net.Node, []client.DocID, error) { ctx := context.Background() - log.Info(ctx, "Building new memory store") + log.InfoContext(ctx, "Building new memory store") db, err := testutils.NewBadgerMemoryDB(ctx, coreDB.WithUpdateEvents()) if err != nil { return nil, nil, err @@ -102,16 +107,16 @@ func setupDefraNode( // parse peers and bootstrap if len(peers) != 0 { - log.Info(ctx, "Parsing bootstrap peers", logging.NewKV("Peers", peers)) + log.InfoContext(ctx, "Parsing bootstrap peers", corelog.Any("Peers", peers)) addrs, err := netutils.ParsePeers(peers) if err != nil { return nil, nil, errors.Wrap(fmt.Sprintf("failed to parse bootstrap peers %v", peers), err) } - log.Info(ctx, "Bootstrapping with peers", logging.NewKV("Addresses", addrs)) + log.InfoContext(ctx, "Bootstrapping with peers", corelog.Any("Addresses", addrs)) n.Bootstrap(addrs) } - log.Info(ctx, "Starting P2P node", logging.NewKV("P2P addresses", n.PeerInfo().Addrs)) + log.InfoContext(ctx, "Starting P2P node", corelog.Any("P2P addresses", n.PeerInfo().Addrs)) if err := n.Start(); err != nil { n.Close() return nil, nil, errors.Wrap("unable to start P2P listeners", err) @@ -125,13 +130,17 @@ func seedSchema(ctx context.Context, db client.DB) error { return err } -func seedDocument(ctx context.Context, db client.DB, document string) (client.DocID, error) { +func seedDocument( + ctx context.Context, + db client.DB, + document string, +) (client.DocID, error) { col, err := db.GetCollectionByName(ctx, userCollection) if err != nil { return client.DocID{}, err } - doc, err := client.NewDocFromJSON([]byte(document), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(document), col.Definition()) if err != nil { return client.DocID{}, err } @@ -144,7 +153,11 @@ func seedDocument(ctx context.Context, db client.DB, 
document string) (client.Do return doc.ID(), nil } -func saveDocument(ctx context.Context, db client.DB, document *client.Document) error { +func saveDocument( + ctx context.Context, + db client.DB, + document *client.Document, +) error { col, err := db.GetCollectionByName(ctx, userCollection) if err != nil { return err @@ -153,7 +166,12 @@ func saveDocument(ctx context.Context, db client.DB, document *client.Document) return col.Save(ctx, document) } -func updateDocument(ctx context.Context, db client.DB, docID client.DocID, update string) error { +func updateDocument( + ctx context.Context, + db client.DB, + docID client.DocID, + update string, +) error { col, err := db.GetCollectionByName(ctx, userCollection) if err != nil { return err @@ -171,7 +189,11 @@ func updateDocument(ctx context.Context, db client.DB, docID client.DocID, updat return col.Save(ctx, doc) } -func getDocument(ctx context.Context, db client.DB, docID client.DocID) (*client.Document, error) { +func getDocument( + ctx context.Context, + db client.DB, + docID client.DocID, +) (*client.Document, error) { col, err := db.GetCollectionByName(ctx, userCollection) if err != nil { return nil, err @@ -191,12 +213,12 @@ func executeTestCase(t *testing.T, test P2PTestCase) { nodes := []*net.Node{} for i, cfg := range test.NodeConfig { - log.Info(ctx, fmt.Sprintf("Setting up node %d", i)) + log.InfoContext(ctx, fmt.Sprintf("Setting up node %d", i)) var peerAddresses []string if peers, ok := test.NodePeers[i]; ok { for _, p := range peers { if p >= len(nodes) { - log.Info(ctx, "cannot set a peer that hasn't been started. Skipping to next peer") + log.InfoContext(ctx, "cannot set a peer that hasn't been started. Skipping to next peer") continue } peerInfo := nodes[p].PeerInfo() @@ -206,7 +228,12 @@ func executeTestCase(t *testing.T, test P2PTestCase) { ) } } - n, d, err := setupDefraNode(t, cfg, peerAddresses, test.SeedDocuments) + n, d, err := setupDefraNode( + t, + cfg, + peerAddresses, + test.SeedDocuments, + ) require.NoError(t, err) if i == 0 { @@ -226,10 +253,10 @@ func executeTestCase(t *testing.T, test P2PTestCase) { if i == j { continue } - log.Info(ctx, fmt.Sprintf("Waiting for node %d to connect with peer %d", i, j)) + log.InfoContext(ctx, fmt.Sprintf("Waiting for node %d to connect with peer %d", i, j)) err := n.WaitForPubSubEvent(p.PeerID()) require.NoError(t, err) - log.Info(ctx, fmt.Sprintf("Node %d connected to peer %d", i, j)) + log.InfoContext(ctx, fmt.Sprintf("Node %d connected to peer %d", i, j)) } } } @@ -237,14 +264,19 @@ func executeTestCase(t *testing.T, test P2PTestCase) { // update and sync peers for n, updateMap := range test.Updates { if n >= len(nodes) { - log.Info(ctx, "cannot update a node that hasn't been started. Skipping to next node") + log.InfoContext(ctx, "cannot update a node that hasn't been started. 
Skipping to next node") continue } for d, updates := range updateMap { for _, update := range updates { - log.Info(ctx, fmt.Sprintf("Updating node %d with update %d", n, d)) - err := updateDocument(ctx, nodes[n].DB, docIDs[d], update) + log.InfoContext(ctx, fmt.Sprintf("Updating node %d with update %d", n, d)) + err := updateDocument( + ctx, + nodes[n].DB, + docIDs[d], + update, + ) require.NoError(t, err) // wait for peers to sync @@ -252,10 +284,10 @@ func executeTestCase(t *testing.T, test P2PTestCase) { if n2 == n { continue } - log.Info(ctx, fmt.Sprintf("Waiting for node %d to sync with peer %d", n2, n)) + log.InfoContext(ctx, fmt.Sprintf("Waiting for node %d to sync with peer %d", n2, n)) err := p.WaitForPushLogByPeerEvent(nodes[n].PeerInfo().ID) require.NoError(t, err) - log.Info(ctx, fmt.Sprintf("Node %d synced", n2)) + log.InfoContext(ctx, fmt.Sprintf("Node %d synced", n2)) } } } @@ -266,13 +298,17 @@ func executeTestCase(t *testing.T, test P2PTestCase) { continue } if n2 >= len(nodes) { - log.Info(ctx, "cannot check results of a node that hasn't been started. Skipping to next node") + log.InfoContext(ctx, "cannot check results of a node that hasn't been started. Skipping to next node") continue } for d, results := range resultsMap { for field, result := range results { - doc, err := getDocument(ctx, nodes[n2].DB, docIDs[d]) + doc, err := getDocument( + ctx, + nodes[n2].DB, + docIDs[d], + ) require.NoError(t, err) val, err := doc.Get(field) @@ -304,21 +340,29 @@ func executeTestCase(t *testing.T, test P2PTestCase) { if len(test.DocumentsToReplicate) > 0 { for n, reps := range test.NodeReplicators { for _, doc := range test.DocumentsToReplicate { - err := saveDocument(ctx, nodes[n].DB, doc) + err := saveDocument( + ctx, + nodes[n].DB, + doc, + ) require.NoError(t, err) } for _, rep := range reps { - log.Info(ctx, fmt.Sprintf("Waiting for node %d to sync with peer %d", rep, n)) + log.InfoContext(ctx, fmt.Sprintf("Waiting for node %d to sync with peer %d", rep, n)) err := nodes[rep].WaitForPushLogByPeerEvent(nodes[n].PeerID()) require.NoError(t, err) - log.Info(ctx, fmt.Sprintf("Node %d synced", rep)) + log.InfoContext(ctx, fmt.Sprintf("Node %d synced", rep)) for docID, results := range test.ReplicatorResult[rep] { for field, result := range results { d, err := client.NewDocIDFromString(docID) require.NoError(t, err) - doc, err := getDocument(ctx, nodes[rep].DB, d) + doc, err := getDocument( + ctx, + nodes[rep].DB, + d, + ) require.NoError(t, err) val, err := doc.Get(field) diff --git a/tests/integration/net/state/simple/peer/crdt/pcounter_test.go b/tests/integration/net/state/simple/peer/crdt/pcounter_test.go new file mode 100644 index 0000000000..963b7d54cd --- /dev/null +++ b/tests/integration/net/state/simple/peer/crdt/pcounter_test.go @@ -0,0 +1,124 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package peer_test + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestP2PUpdate_WithPCounter_NoError(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Int @crdt(type: "pcounter") + } + `, + }, + testUtils.CreateDoc{ + // Create Shahzad on all nodes + Doc: `{ + "name": "Shahzad", + "points": 10 + }`, + }, + testUtils.ConnectPeers{ + SourceNodeID: 1, + TargetNodeID: 0, + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(0), + DocID: 0, + Doc: `{ + "points": 10 + }`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + Request: `query { + Users { + points + } + }`, + Results: []map[string]any{ + { + "points": int64(20), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestP2PUpdate_WithPCounterSimultaneousUpdate_NoError(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Age: Int @crdt(type: "pcounter") + } + `, + }, + testUtils.CreateDoc{ + // Create John on all nodes + Doc: `{ + "Name": "John", + "Age": 0 + }`, + }, + testUtils.ConnectPeers{ + SourceNodeID: 0, + TargetNodeID: 1, + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(0), + Doc: `{ + "Age": 45 + }`, + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(1), + Doc: `{ + "Age": 45 + }`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + Request: `query { + Users { + Age + } + }`, + Results: []map[string]any{ + { + "Age": int64(90), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/net/state/simple/peer/with_create_add_field_test.go b/tests/integration/net/state/simple/peer/with_create_add_field_test.go index 46ad3c5a9c..22133c78af 100644 --- a/tests/integration/net/state/simple/peer/with_create_add_field_test.go +++ b/tests/integration/net/state/simple/peer/with_create_add_field_test.go @@ -284,6 +284,7 @@ func TestP2PPeerCreateWithNewFieldDocSyncedBeforeReceivingNodeSchemaUpdatedDoesN { "Name": "John", // The email should be returned but it is not + "Email": nil, }, }, }, diff --git a/tests/integration/net/state/simple/peer_replicator/crdt/pcounter_test.go b/tests/integration/net/state/simple/peer_replicator/crdt/pcounter_test.go new file mode 100644 index 0000000000..a7b3c67a59 --- /dev/null +++ b/tests/integration/net/state/simple/peer_replicator/crdt/pcounter_test.go @@ -0,0 +1,160 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package peer_replicator_test + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestP2PPeerReplicatorWithCreate_PCounter_NoError(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Int @crdt(type: "pcounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "points": 0 + }`, + }, + testUtils.ConfigureReplicator{ + SourceNodeID: 0, + TargetNodeID: 2, + }, + testUtils.ConnectPeers{ + SourceNodeID: 0, + TargetNodeID: 1, + }, + testUtils.CreateDoc{ + NodeID: immutable.Some(0), + Doc: `{ + "name": "Shahzad", + "points": 3000 + }`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + NodeID: immutable.Some(0), + Request: `query { + Users { + points + } + }`, + Results: []map[string]any{ + { + "points": int64(0), + }, + { + "points": int64(3000), + }, + }, + }, + testUtils.Request{ + NodeID: immutable.Some(1), + Request: `query { + Users { + points + } + }`, + Results: []map[string]any{ + { + "points": int64(0), + }, + }, + }, + testUtils.Request{ + NodeID: immutable.Some(2), + Request: `query { + Users { + points + } + }`, + Results: []map[string]any{ + { + "points": int64(0), + }, + { + "points": int64(3000), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestP2PPeerReplicatorWithUpdate_PCounter_NoError(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Int @crdt(type: "pcounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "points": 10 + }`, + }, + testUtils.ConnectPeers{ + SourceNodeID: 1, + TargetNodeID: 0, + }, + testUtils.ConfigureReplicator{ + SourceNodeID: 0, + TargetNodeID: 2, + }, + testUtils.UpdateDoc{ + // Update John's points on the first node only, and allow the value to sync + NodeID: immutable.Some(0), + Doc: `{ + "points": 10 + }`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + Request: `query { + Users { + points + } + }`, + Results: []map[string]any{ + { + "points": int64(20), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/net/state/simple/replicator/crdt/pcounter_test.go b/tests/integration/net/state/simple/replicator/crdt/pcounter_test.go new file mode 100644 index 0000000000..33ea5d136d --- /dev/null +++ b/tests/integration/net/state/simple/replicator/crdt/pcounter_test.go @@ -0,0 +1,71 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package replicator + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestP2POneToOneReplicatorUpdate_PCounter_NoError(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Int @crdt(type: "pcounter") + } + `, + }, + testUtils.CreateDoc{ + // This document is created in first node before the replicator is set up. + // Updates should be synced across nodes. + NodeID: immutable.Some(0), + Doc: `{ + "name": "John", + "points": 10 + }`, + }, + testUtils.ConfigureReplicator{ + SourceNodeID: 0, + TargetNodeID: 1, + }, + testUtils.UpdateDoc{ + // Update John's points on the first node only, and allow the value to sync + NodeID: immutable.Some(0), + Doc: `{ + "points": 10 + }`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + Request: `query { + Users { + points + } + }`, + Results: []map[string]any{ + { + "points": int64(20), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/net/state/simple/replicator/with_create_test.go b/tests/integration/net/state/simple/replicator/with_create_test.go index 3cec12b351..0d3dbad143 100644 --- a/tests/integration/net/state/simple/replicator/with_create_test.go +++ b/tests/integration/net/state/simple/replicator/with_create_test.go @@ -492,7 +492,7 @@ func TestP2POneToOneReplicatorOrderIndependent(t *testing.T) { "name": "John", "_version": []map[string]any{ { - "schemaVersionId": "bafkreiewca6o66mgkpbai2vtrupolvtf66wllbvouvtwo6fkc6alrybzfa", + "schemaVersionId": "bafkreihhd6bqrjhl5zidwztgxzeseveplv3cj3fwtn3unjkdx7j2vr2vrq", }, }, }, @@ -552,7 +552,7 @@ func TestP2POneToOneReplicatorOrderIndependentDirectCreate(t *testing.T) { "_docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", "_version": []map[string]any{ { - "schemaVersionId": "bafkreiewca6o66mgkpbai2vtrupolvtf66wllbvouvtwo6fkc6alrybzfa", + "schemaVersionId": "bafkreihhd6bqrjhl5zidwztgxzeseveplv3cj3fwtn3unjkdx7j2vr2vrq", }, }, }, diff --git a/tests/integration/p2p.go b/tests/integration/p2p.go index 4d48cb033b..0cace429ae 100644 --- a/tests/integration/p2p.go +++ b/tests/integration/p2p.go @@ -14,11 +14,11 @@ import ( "time" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/logging" "github.com/sourcenetwork/defradb/net" "github.com/sourcenetwork/defradb/tests/clients" "github.com/libp2p/go-libp2p/core/peer" + "github.com/sourcenetwork/corelog" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -55,6 +55,12 @@ type ConfigureReplicator struct { // TargetNodeID is the node ID (index) of the node to which data should be replicated. TargetNodeID int + + // Any error expected from the action. Optional. + // + // String can be a partial, and the test will pass if an error is returned that + // contains this string. + ExpectedError string } // DeleteReplicator deletes a directional replicator relationship between two nodes. @@ -149,7 +155,7 @@ func connectPeers( targetNode := s.nodes[cfg.TargetNodeID] addrs := []peer.AddrInfo{targetNode.PeerInfo()} - log.Info(s.ctx, "Bootstrapping with peers", logging.NewKV("Addresses", addrs)) + log.InfoContext(s.ctx, "Bootstrapping with peers", corelog.Any("Addresses", addrs)) sourceNode.Bootstrap(addrs) // Bootstrap triggers a bunch of async stuff for which we have no good way of waiting on. 
It must be @@ -307,8 +313,12 @@ func configureReplicator( err := sourceNode.SetReplicator(s.ctx, client.Replicator{ Info: targetNode.PeerInfo(), }) - require.NoError(s.t, err) - setupReplicatorWaitSync(s, 0, cfg, sourceNode, targetNode) + + expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, cfg.ExpectedError) + assertExpectedErrorRaised(s.t, s.testCase.Description, cfg.ExpectedError, expectedErrorRaised) + if err == nil { + setupReplicatorWaitSync(s, 0, cfg, sourceNode, targetNode) + } } func deleteReplicator( diff --git a/tests/integration/query/commits/simple_test.go b/tests/integration/query/commits/simple_test.go index 4239e7cfd6..b90d5d0ea4 100644 --- a/tests/integration/query/commits/simple_test.go +++ b/tests/integration/query/commits/simple_test.go @@ -36,13 +36,13 @@ func TestQueryCommits(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", }, }, }, @@ -79,22 +79,22 @@ func TestQueryCommitsMultipleDocs(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeibhain2764v7eltfiam6dgwivfj56mvbme34nbdsdbndrsjkc2cje", + "cid": "bafybeihnalsemihsyycy3vaxbhq6iqrixmsk5k3idq52u76h2f5wkvobx4", }, { - "cid": "bafybeickrd5xayjhedyypf3yus55bkhpwd5dqlkdhivrcceexkpsgnic24", + "cid": "bafybeifxk5rhzuemqn2o35hh7346gydqlfmhkdzeguiqo5vczgyz4xz7rm", }, { - "cid": "bafybeieqyyprwrkbgyn7x4jkzmlnupnzpdymvbulef37brkzn7blqbe6l4", + "cid": "bafybeig36zwhejk54nvvey5wsfbl7rzm7xscsyji5uqp6j4hw4zh7dhep4", }, { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", }, }, }, @@ -125,16 +125,16 @@ func TestQueryCommitsWithSchemaVersionIdField(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", - "schemaVersionId": "bafkreidqkjb23ngp34eebeaxiogrlogkpfz62vjb3clnnyvhlbgdaywkg4", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", + "schemaVersionId": "bafkreicprhqxzlw3akyssz2v6pifwfueavp7jq2yj3dghapi3qcq6achs4", }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", - "schemaVersionId": "bafkreidqkjb23ngp34eebeaxiogrlogkpfz62vjb3clnnyvhlbgdaywkg4", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", + "schemaVersionId": "bafkreicprhqxzlw3akyssz2v6pifwfueavp7jq2yj3dghapi3qcq6achs4", }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", - "schemaVersionId": "bafkreidqkjb23ngp34eebeaxiogrlogkpfz62vjb3clnnyvhlbgdaywkg4", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", + "schemaVersionId": "bafkreicprhqxzlw3akyssz2v6pifwfueavp7jq2yj3dghapi3qcq6achs4", }, }, }, @@ -349,7 +349,7 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) { `, Results: []map[string]any{ { - "cid": 
"bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", "collectionID": int64(1), "delta": testUtils.CBORValue(22), "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", @@ -358,13 +358,13 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) { "height": int64(2), "links": []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "name": "_head", }, }, }, { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "collectionID": int64(1), "delta": testUtils.CBORValue(21), "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", @@ -374,7 +374,7 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) { "links": []map[string]any{}, }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "collectionID": int64(1), "delta": testUtils.CBORValue("John"), "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", @@ -384,7 +384,7 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) { "links": []map[string]any{}, }, { - "cid": "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a", + "cid": "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm", "collectionID": int64(1), "delta": nil, "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", @@ -393,17 +393,17 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) { "height": int64(2), "links": []map[string]any{ { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "name": "_head", }, { - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", "name": "age", }, }, }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "collectionID": int64(1), "delta": nil, "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", @@ -412,11 +412,11 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) { "height": int64(1), "links": []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "name": "age", }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "name": "name", }, }, diff --git a/tests/integration/query/commits/with_cid_test.go b/tests/integration/query/commits/with_cid_test.go index 4878ea8f9a..22f2caa5c2 100644 --- a/tests/integration/query/commits/with_cid_test.go +++ b/tests/integration/query/commits/with_cid_test.go @@ -38,14 +38,14 @@ func TestQueryCommitsWithCid(t *testing.T) { testUtils.Request{ Request: `query { commits( - cid: "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi" + cid: "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq" ) { cid } }`, Results: []map[string]any{ { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", }, }, }, @@ -71,14 +71,14 @@ func TestQueryCommitsWithCidForFieldCommit(t *testing.T) { testUtils.Request{ 
Request: `query { commits( - cid: "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi" + cid: "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq" ) { cid } }`, Results: []map[string]any{ { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", }, }, }, diff --git a/tests/integration/query/commits/with_depth_test.go b/tests/integration/query/commits/with_depth_test.go index cdda45101c..3475985174 100644 --- a/tests/integration/query/commits/with_depth_test.go +++ b/tests/integration/query/commits/with_depth_test.go @@ -36,13 +36,13 @@ func TestQueryCommitsWithDepth1(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", }, }, }, @@ -81,16 +81,16 @@ func TestQueryCommitsWithDepth1WithUpdate(t *testing.T) { Results: []map[string]any{ { // "Age" field head - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", "height": int64(2), }, { // "Name" field head (unchanged from create) - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "height": int64(1), }, { - "cid": "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a", + "cid": "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm", "height": int64(2), }, }, @@ -137,27 +137,27 @@ func TestQueryCommitsWithDepth2WithUpdate(t *testing.T) { Results: []map[string]any{ { // Composite head - "cid": "bafybeic45wkhxtpn3vgd2dmmohq76vw56qz3cpu3oorha3hf2w6qu7bpoa", + "cid": "bafybeigzaxekosbmrfrzjhkztodipzmz3voiqnia275347b6vkq5keouf4", "height": int64(3), }, { // Composite head -1 - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", "height": int64(2), }, { // "Name" field head (unchanged from create) - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "height": int64(1), }, { // "Age" field head - "cid": "bafybeid4y4vqmvec2mvm3su77rrmj6tzsx5zdlt6ias4hzqxbevmosydc4", + "cid": "bafybeifwa5vgfvnrdwzqmojsxilwbg2k37axh2fs57zfmddz3l5yivn4la", "height": int64(3), }, { // "Age" field head -1 - "cid": "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a", + "cid": "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm", "height": int64(2), }, }, @@ -195,22 +195,22 @@ func TestQueryCommitsWithDepth1AndMultipleDocs(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeifysgo74dhzl2t74j5qh32t5uufar7otua6ggvsapjbxpzimcbnoi", + "cid": "bafybeicacj5fmr267b6kkmv4ck3g5cm5odca7hu7ajwagfttpspbsu7n5u", }, { - "cid": "bafybeibqvujxi4tjtrwg5igvg6zdvjaxvkmb5h2msjbtta3lmytgs7hft4", + "cid": "bafybeiexu7xpwhyo2azo2ap2nbny5d4chhr725xrhmxnt5ebabucyjlfqu", }, { - "cid": "bafybeib7zmofgbtvxcb3gy3bfbwp3btqrmoacmxl4duqhwlvwu6pihzbeu", + "cid": "bafybeibbp6jn7y2t6jakbdtvboruieo3iobyuumppbwbw7rwkmz4tdh5yq", }, { - "cid": 
"bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", }, }, }, diff --git a/tests/integration/query/commits/with_doc_id_cid_test.go b/tests/integration/query/commits/with_doc_id_cid_test.go index 434e8b27aa..9f61805048 100644 --- a/tests/integration/query/commits/with_doc_id_cid_test.go +++ b/tests/integration/query/commits/with_doc_id_cid_test.go @@ -104,14 +104,14 @@ func TestQueryCommitsWithDocIDAndCidWithUpdate(t *testing.T) { Request: ` { commits( docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", - cid: "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a" + cid: "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm" ) { cid } }`, Results: []map[string]any{ { - "cid": "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a", + "cid": "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm", }, }, }, diff --git a/tests/integration/query/commits/with_doc_id_count_test.go b/tests/integration/query/commits/with_doc_id_count_test.go index 3cd01352ad..89ba666163 100644 --- a/tests/integration/query/commits/with_doc_id_count_test.go +++ b/tests/integration/query/commits/with_doc_id_count_test.go @@ -37,15 +37,15 @@ func TestQueryCommitsWithDocIDAndLinkCount(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "_count": 0, }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "_count": 0, }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "_count": 2, }, }, diff --git a/tests/integration/query/commits/with_doc_id_field_test.go b/tests/integration/query/commits/with_doc_id_field_test.go index de672e8d70..65fb4a5637 100644 --- a/tests/integration/query/commits/with_doc_id_field_test.go +++ b/tests/integration/query/commits/with_doc_id_field_test.go @@ -118,7 +118,7 @@ func TestQueryCommitsWithDocIDAndFieldId(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", }, }, }, @@ -150,7 +150,7 @@ func TestQueryCommitsWithDocIDAndCompositeFieldId(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", }, }, }, diff --git a/tests/integration/query/commits/with_doc_id_limit_offset_test.go b/tests/integration/query/commits/with_doc_id_limit_offset_test.go index d7981cc6ce..47b21aaf08 100644 --- a/tests/integration/query/commits/with_doc_id_limit_offset_test.go +++ b/tests/integration/query/commits/with_doc_id_limit_offset_test.go @@ -57,10 +57,10 @@ func TestQueryCommitsWithDocIDAndLimitAndOffset(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeic45wkhxtpn3vgd2dmmohq76vw56qz3cpu3oorha3hf2w6qu7bpoa", + "cid": "bafybeigzaxekosbmrfrzjhkztodipzmz3voiqnia275347b6vkq5keouf4", }, { - "cid": 
"bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", }, }, }, diff --git a/tests/integration/query/commits/with_doc_id_limit_test.go b/tests/integration/query/commits/with_doc_id_limit_test.go index b31a3b848e..938ce72ea9 100644 --- a/tests/integration/query/commits/with_doc_id_limit_test.go +++ b/tests/integration/query/commits/with_doc_id_limit_test.go @@ -50,10 +50,10 @@ func TestQueryCommitsWithDocIDAndLimit(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeic45wkhxtpn3vgd2dmmohq76vw56qz3cpu3oorha3hf2w6qu7bpoa", + "cid": "bafybeigzaxekosbmrfrzjhkztodipzmz3voiqnia275347b6vkq5keouf4", }, { - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", }, }, }, diff --git a/tests/integration/query/commits/with_doc_id_order_limit_offset_test.go b/tests/integration/query/commits/with_doc_id_order_limit_offset_test.go index 135418b8f2..058825acca 100644 --- a/tests/integration/query/commits/with_doc_id_order_limit_offset_test.go +++ b/tests/integration/query/commits/with_doc_id_order_limit_offset_test.go @@ -58,11 +58,11 @@ func TestQueryCommitsWithDocIDAndOrderAndLimitAndOffset(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a", + "cid": "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm", "height": int64(2), }, { - "cid": "bafybeic45wkhxtpn3vgd2dmmohq76vw56qz3cpu3oorha3hf2w6qu7bpoa", + "cid": "bafybeigzaxekosbmrfrzjhkztodipzmz3voiqnia275347b6vkq5keouf4", "height": int64(3), }, }, diff --git a/tests/integration/query/commits/with_doc_id_order_test.go b/tests/integration/query/commits/with_doc_id_order_test.go index 10009bab11..70ab643688 100644 --- a/tests/integration/query/commits/with_doc_id_order_test.go +++ b/tests/integration/query/commits/with_doc_id_order_test.go @@ -44,23 +44,23 @@ func TestQueryCommitsWithDocIDAndOrderHeightDesc(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", "height": int64(2), }, { - "cid": "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a", + "cid": "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm", "height": int64(2), }, { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "height": int64(1), }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "height": int64(1), }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "height": int64(1), }, }, @@ -99,23 +99,23 @@ func TestQueryCommitsWithDocIDAndOrderHeightAsc(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "height": int64(1), }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "height": int64(1), }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "height": int64(1), }, { - "cid": 
"bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", "height": int64(2), }, { - "cid": "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a", + "cid": "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm", "height": int64(2), }, }, @@ -154,23 +154,23 @@ func TestQueryCommitsWithDocIDAndOrderCidDesc(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "height": int64(1), }, { - "cid": "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a", - "height": int64(2), + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", + "height": int64(1), }, { - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm", "height": int64(2), }, { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", - "height": int64(1), + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", + "height": int64(2), }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "height": int64(1), }, }, @@ -209,23 +209,23 @@ func TestQueryCommitsWithDocIDAndOrderCidAsc(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "height": int64(1), }, { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", - "height": int64(1), + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", + "height": int64(2), }, { - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm", "height": int64(2), }, { - "cid": "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a", - "height": int64(2), + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", + "height": int64(1), }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "height": int64(1), }, }, @@ -278,39 +278,39 @@ func TestQueryCommitsWithDocIDAndOrderAndMultiUpdatesCidAsc(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "height": int64(1), }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "height": int64(1), }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "height": int64(1), }, { - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", "height": int64(2), }, { - "cid": "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a", + "cid": "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm", "height": int64(2), }, { - "cid": "bafybeic45wkhxtpn3vgd2dmmohq76vw56qz3cpu3oorha3hf2w6qu7bpoa", + "cid": "bafybeigzaxekosbmrfrzjhkztodipzmz3voiqnia275347b6vkq5keouf4", "height": int64(3), }, { - "cid": 
"bafybeid4y4vqmvec2mvm3su77rrmj6tzsx5zdlt6ias4hzqxbevmosydc4", + "cid": "bafybeifwa5vgfvnrdwzqmojsxilwbg2k37axh2fs57zfmddz3l5yivn4la", "height": int64(3), }, { - "cid": "bafybeiatfviresatclvedt6zhk4ys7p6cdts5udqsl33nu5d2hxtw4l6la", + "cid": "bafybeifn2f5lgzall3dzva47khbtib77lt7ve5qyclou3ihi2hy2uqj4nm", "height": int64(4), }, { - "cid": "bafybeiaydxxf7bmeh5ou47z6exa73heg6vjjzznbvrxqbemmu55sdhvuom", + "cid": "bafybeieijpm36ntafrncl4kgx6dkxgpbftcl4f7obbbmagurcgdoj6sl5y", "height": int64(4), }, }, diff --git a/tests/integration/query/commits/with_doc_id_test.go b/tests/integration/query/commits/with_doc_id_test.go index a08f82f3a0..1524409663 100644 --- a/tests/integration/query/commits/with_doc_id_test.go +++ b/tests/integration/query/commits/with_doc_id_test.go @@ -62,13 +62,13 @@ func TestQueryCommitsWithDocID(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", }, }, }, @@ -102,22 +102,22 @@ func TestQueryCommitsWithDocIDAndLinks(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "links": []map[string]any{}, }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "links": []map[string]any{}, }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "links": []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "name": "age", }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "name": "name", }, }, @@ -158,23 +158,23 @@ func TestQueryCommitsWithDocIDAndUpdate(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", "height": int64(2), }, { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "height": int64(1), }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "height": int64(1), }, { - "cid": "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a", + "cid": "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm", "height": int64(2), }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "height": int64(1), }, }, @@ -219,44 +219,44 @@ func TestQueryCommitsWithDocIDAndUpdateAndLinks(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", "links": []map[string]any{ { - "cid": 
"bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "name": "_head", }, }, }, { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "links": []map[string]any{}, }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "links": []map[string]any{}, }, { - "cid": "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a", + "cid": "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm", "links": []map[string]any{ { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "name": "_head", }, { - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", "name": "age", }, }, }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "links": []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "name": "age", }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "name": "name", }, }, diff --git a/tests/integration/query/commits/with_doc_id_typename_test.go b/tests/integration/query/commits/with_doc_id_typename_test.go index 09dcc4060f..51bc88a946 100644 --- a/tests/integration/query/commits/with_doc_id_typename_test.go +++ b/tests/integration/query/commits/with_doc_id_typename_test.go @@ -37,15 +37,15 @@ func TestQueryCommitsWithDocIDWithTypeName(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "__typename": "Commit", }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "__typename": "Commit", }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "__typename": "Commit", }, }, diff --git a/tests/integration/query/commits/with_field_test.go b/tests/integration/query/commits/with_field_test.go index 01a2204326..1ea35a8d96 100644 --- a/tests/integration/query/commits/with_field_test.go +++ b/tests/integration/query/commits/with_field_test.go @@ -66,7 +66,7 @@ func TestQueryCommitsWithFieldId(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", }, }, }, @@ -98,7 +98,7 @@ func TestQueryCommitsWithCompositeFieldId(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", }, }, }, @@ -131,8 +131,8 @@ func TestQueryCommitsWithCompositeFieldIdWithReturnedSchemaVersionId(t *testing. 
}`, Results: []map[string]any{ { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", - "schemaVersionId": "bafkreidqkjb23ngp34eebeaxiogrlogkpfz62vjb3clnnyvhlbgdaywkg4", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", + "schemaVersionId": "bafkreicprhqxzlw3akyssz2v6pifwfueavp7jq2yj3dghapi3qcq6achs4", }, }, }, diff --git a/tests/integration/query/commits/with_group_test.go b/tests/integration/query/commits/with_group_test.go index 362829ee0b..1971e6f6dd 100644 --- a/tests/integration/query/commits/with_group_test.go +++ b/tests/integration/query/commits/with_group_test.go @@ -89,10 +89,10 @@ func TestQueryCommitsWithGroupByHeightWithChild(t *testing.T) { "height": int64(2), "_group": []map[string]any{ { - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", }, { - "cid": "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a", + "cid": "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm", }, }, }, @@ -100,13 +100,13 @@ func TestQueryCommitsWithGroupByHeightWithChild(t *testing.T) { "height": int64(1), "_group": []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", }, }, }, @@ -142,7 +142,7 @@ func TestQueryCommitsWithGroupByCidWithChild(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "_group": []map[string]any{ { "height": int64(1), @@ -150,7 +150,7 @@ func TestQueryCommitsWithGroupByCidWithChild(t *testing.T) { }, }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "_group": []map[string]any{ { "height": int64(1), @@ -158,7 +158,7 @@ func TestQueryCommitsWithGroupByCidWithChild(t *testing.T) { }, }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "_group": []map[string]any{ { "height": int64(1), diff --git a/tests/integration/query/latest_commits/with_doc_id_field_test.go b/tests/integration/query/latest_commits/with_doc_id_field_test.go index c1fce06eb6..0b886b966a 100644 --- a/tests/integration/query/latest_commits/with_doc_id_field_test.go +++ b/tests/integration/query/latest_commits/with_doc_id_field_test.go @@ -68,7 +68,7 @@ func TestQueryLatestCommitsWithDocIDAndFieldId(t *testing.T) { }, Results: []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "links": []map[string]any{}, }, }, @@ -101,14 +101,14 @@ func TestQueryLatestCommitsWithDocIDAndCompositeFieldId(t *testing.T) { }, Results: []map[string]any{ { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "links": []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": 
"bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "name": "age", }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "name": "name", }, }, diff --git a/tests/integration/query/latest_commits/with_doc_id_test.go b/tests/integration/query/latest_commits/with_doc_id_test.go index 0c34c4dab2..089f6f5086 100644 --- a/tests/integration/query/latest_commits/with_doc_id_test.go +++ b/tests/integration/query/latest_commits/with_doc_id_test.go @@ -38,14 +38,14 @@ func TestQueryLatestCommitsWithDocID(t *testing.T) { }, Results: []map[string]any{ { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "links": []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "name": "age", }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "name": "name", }, }, @@ -75,8 +75,8 @@ func TestQueryLatestCommitsWithDocIDWithSchemaVersionIdField(t *testing.T) { }, Results: []map[string]any{ { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", - "schemaVersionId": "bafkreidqkjb23ngp34eebeaxiogrlogkpfz62vjb3clnnyvhlbgdaywkg4", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", + "schemaVersionId": "bafkreicprhqxzlw3akyssz2v6pifwfueavp7jq2yj3dghapi3qcq6achs4", }, }, } diff --git a/tests/integration/query/one_to_many/with_cid_doc_id_test.go b/tests/integration/query/one_to_many/with_cid_doc_id_test.go index c9dd0ff4ba..6b896ca6ed 100644 --- a/tests/integration/query/one_to_many/with_cid_doc_id_test.go +++ b/tests/integration/query/one_to_many/with_cid_doc_id_test.go @@ -104,7 +104,7 @@ func TestQueryOneToManyWithCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Book ( - cid: "bafybeidshqlc7z2psrtfhmrarsxwxwwis6baxjrzs2x6mdmzsop6b7hnii" + cid: "bafybeia3qbhebdwssoe5udinpbdj4pntb5wjr77ql7ptzq32howbaxz2cu" docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { name @@ -179,7 +179,7 @@ func TestQueryOneToManyWithChildUpdateAndFirstCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Book ( - cid: "bafybeidshqlc7z2psrtfhmrarsxwxwwis6baxjrzs2x6mdmzsop6b7hnii", + cid: "bafybeia3qbhebdwssoe5udinpbdj4pntb5wjr77ql7ptzq32howbaxz2cu", docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { name @@ -252,9 +252,10 @@ func TestQueryOneToManyWithParentUpdateAndFirstCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Book ( - cid: "bafybeidshqlc7z2psrtfhmrarsxwxwwis6baxjrzs2x6mdmzsop6b7hnii", + cid: "bafybeia3qbhebdwssoe5udinpbdj4pntb5wjr77ql7ptzq32howbaxz2cu", docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { + name rating author { name @@ -324,9 +325,10 @@ func TestQueryOneToManyWithParentUpdateAndLastCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Book ( - cid: "bafybeiefqhex3axofwy2gwdynhs6rijwrpkdpwy5fnqnzbk3e7iwcgvrqa", + cid: "bafybeibqkdnc63xh5k4frs3x3k7z7p6sw4usjrhxd4iusbjj2uhxfjfjcq", docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { + name rating author { name diff --git a/tests/integration/query/one_to_many/with_group_related_id_alias_test.go b/tests/integration/query/one_to_many/with_group_related_id_alias_test.go index 9f17d2ffe7..bef01aee48 100644 --- a/tests/integration/query/one_to_many/with_group_related_id_alias_test.go 
+++ b/tests/integration/query/one_to_many/with_group_related_id_alias_test.go @@ -24,6 +24,7 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeFromManySideUsingAlias(t *t Request: `query { Book(groupBy: [author]) { + author_id _group { name rating diff --git a/tests/integration/query/one_to_many/with_group_related_id_test.go b/tests/integration/query/one_to_many/with_group_related_id_test.go index 6b6b6f331f..4eec467480 100644 --- a/tests/integration/query/one_to_many/with_group_related_id_test.go +++ b/tests/integration/query/one_to_many/with_group_related_id_test.go @@ -22,6 +22,7 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeIDFromManySide(t *testing.T Description: "One-to-many query with groupBy on related id (from many side).", Request: `query { Book(groupBy: [author_id]) { + author_id _group { name rating diff --git a/tests/integration/query/one_to_many/with_id_field_test.go b/tests/integration/query/one_to_many/with_id_field_test.go index 8a16f1c49a..9f70b0d1b3 100644 --- a/tests/integration/query/one_to_many/with_id_field_test.go +++ b/tests/integration/query/one_to_many/with_id_field_test.go @@ -42,46 +42,7 @@ func TestQueryOneToManyWithIdFieldOnPrimary(t *testing.T) { published: [Book] } `, - }, - testUtils.CreateDoc{ - // bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed - CollectionID: 1, - Doc: `{ - "name": "John Grisham" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "Painted House", - "author_id": 123456 - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "A Time for Mercy", - "author_id": "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - }`, - ExpectedError: "value doesn't contain number; it contains string", - }, - testUtils.Request{ - Request: `query { - Book { - name - author_id - author { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Painted House", - "author_id": int64(123456), - "author": nil, - }, - }, + ExpectedError: "relational id field of invalid kind. Field: author_id, Expected: ID, Actual: Int", }, }, } diff --git a/tests/integration/query/one_to_one/with_clashing_id_field_test.go b/tests/integration/query/one_to_one/with_clashing_id_field_test.go index f563f1e358..1dd97572ca 100644 --- a/tests/integration/query/one_to_one/with_clashing_id_field_test.go +++ b/tests/integration/query/one_to_one/with_clashing_id_field_test.go @@ -94,22 +94,7 @@ func TestQueryOneToOneWithClashingIdFieldOnPrimary(t *testing.T) { published: Book } `, - }, - testUtils.CreateDoc{ - // bae-d82dbe47-9df1-5e33-bd87-f92e9c378161 - CollectionID: 0, - Doc: `{ - "name": "Painted House", - "author_id": 123456 - }`, - }, - testUtils.CreateDoc{ - CollectionID: 1, - Doc: `{ - "name": "John Grisham", - "published_id": "bae-d82dbe47-9df1-5e33-bd87-f92e9c378161" - }`, - ExpectedError: "target document is already linked to another document.", + ExpectedError: "relational id field of invalid kind. 
Field: author_id, Expected: ID, Actual: Int", }, }, } diff --git a/tests/integration/query/one_to_one/with_filter_test.go b/tests/integration/query/one_to_one/with_filter_test.go index 9d00cdd416..ab13634667 100644 --- a/tests/integration/query/one_to_one/with_filter_test.go +++ b/tests/integration/query/one_to_one/with_filter_test.go @@ -492,3 +492,130 @@ func TestQueryOneToOneWithCompoundOrFilterThatIncludesRelation(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +func TestQueryOneToOne_WithCompoundFiltersThatIncludesRelation_ShouldReturnResults(t *testing.T) { + test := testUtils.TestCase{ + Description: "One-to-one relation with _and filter that includes relation", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + Doc: `{ + "name": "Painted House", + "rating": 4.9 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-437092f3-7817-555c-bf8a-cc1c5a0a0db6 + Doc: `{ + "name": "Some Book", + "rating": 4.0 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-66ba0c48-4984-5b44-83dd-edb791a54b7d + Doc: `{ + "name": "Some Other Book", + "rating": 3.0 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-3bfe0092-e31f-5ebe-a3ba-fa18fac448a6 + Doc: `{ + "name": "John Grisham", + "age": 65, + "verified": true, + "published_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-5dac8488-0f75-5ddf-b08b-804b3d33a239 + Doc: `{ + "name": "Some Writer", + "age": 45, + "verified": false, + "published_id": "bae-437092f3-7817-555c-bf8a-cc1c5a0a0db6" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-8b0c345b-dda7-573c-b5f1-5fa1d70593e1 + Doc: `{ + "name": "Some Other Writer", + "age": 30, + "verified": true, + "published_id": "bae-66ba0c48-4984-5b44-83dd-edb791a54b7d" + }`, + }, + testUtils.Request{ + Request: `query { + Book(filter: {_or: [ + {rating: {_gt: 4.0}}, + {author: {age: {_eq: 30}}} + ]}) { + name + rating + } + }`, + Results: []map[string]any{ + { + "name": "Some Other Book", + "rating": 3.0, + }, + { + "name": "Painted House", + "rating": 4.9, + }, + }, + }, + testUtils.Request{ + Request: `query { + Book(filter: {_and: [ + {rating: {_ge: 4.0}}, + {author: {age: {_eq: 45}}} + ]}) { + name + rating + } + }`, + Results: []map[string]any{ + { + "name": "Some Book", + "rating": 4.0, + }, + }, + }, + testUtils.Request{ + // This is the same as {_not: {_and: [{rating: {_ge: 4.0}}, {author: {age: {_eq: 45}}}]}} + Request: `query { + Book(filter: {_not: { + rating: {_ge: 4.0}, + author: {age: {_eq: 45}} + }}) { + name + rating + } + }`, + Results: []map[string]any{ + { + "name": "Some Other Book", + "rating": 3.0, + }, + { + "name": "Painted House", + "rating": 4.9, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_cid_doc_id_test.go b/tests/integration/query/simple/with_cid_doc_id_test.go index f19bc4e9fa..6fe41d1aae 100644 --- a/tests/integration/query/simple/with_cid_doc_id_test.go +++ b/tests/integration/query/simple/with_cid_doc_id_test.go @@ -93,7 +93,7 @@ func TestQuerySimpleWithCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafybeidzstxabh7qktq7pkmmxvpjbnwklxz3h5l6d425ldvjy65xvvuxu4", + cid: "bafybeif757a4mdwimqwl24ujjnao6xlajiajz2hwuleopnptusuttri6zu", docID: "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" ) { name @@ -135,7 +135,7 @@ func 
TestQuerySimpleWithUpdateAndFirstCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafybeidzstxabh7qktq7pkmmxvpjbnwklxz3h5l6d425ldvjy65xvvuxu4", + cid: "bafybeif757a4mdwimqwl24ujjnao6xlajiajz2hwuleopnptusuttri6zu", docID: "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" ) { name @@ -177,7 +177,7 @@ func TestQuerySimpleWithUpdateAndLastCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafybeickytibhqnqtwhpjfi7ponnu5756ifo76oxb2ksxrz4iiqaywg3lu", + cid: "bafybeibwxvtvppws6sjfoajazevrdh27g4qwn5wguslpabyl3kzxd2a6fm", docID: "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" ) { name @@ -224,7 +224,7 @@ func TestQuerySimpleWithUpdateAndMiddleCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafybeickytibhqnqtwhpjfi7ponnu5756ifo76oxb2ksxrz4iiqaywg3lu", + cid: "bafybeibwxvtvppws6sjfoajazevrdh27g4qwn5wguslpabyl3kzxd2a6fm", docID: "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" ) { name @@ -266,7 +266,7 @@ func TestQuerySimpleWithUpdateAndFirstCidAndDocIDAndSchemaVersion(t *testing.T) testUtils.Request{ Request: `query { Users ( - cid: "bafybeidzstxabh7qktq7pkmmxvpjbnwklxz3h5l6d425ldvjy65xvvuxu4", + cid: "bafybeif757a4mdwimqwl24ujjnao6xlajiajz2hwuleopnptusuttri6zu", docID: "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" ) { name @@ -280,7 +280,7 @@ func TestQuerySimpleWithUpdateAndFirstCidAndDocIDAndSchemaVersion(t *testing.T) "name": "John", "_version": []map[string]any{ { - "schemaVersionId": "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", + "schemaVersionId": "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", }, }, }, @@ -324,7 +324,7 @@ func TestCidAndDocIDQuery_ContainsPNCounterWithIntKind_NoError(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafybeiebqzqml6nn3laarr7yekakrsdnkn4nbgrl4xc5rshljp3in6au2m", + cid: "bafybeicruxxfhxhyvefbxid7gukdbnfzkyad45phu4mnwzzqde24p32xnu", docID: "bae-a688789e-d8a6-57a7-be09-22e005ab79e0" ) { name @@ -376,7 +376,101 @@ func TestCidAndDocIDQuery_ContainsPNCounterWithFloatKind_NoError(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafybeifzuh74aq47vjngkwipjne4r2gi3v2clewgsruspqirihnps4vcmu", + cid: "bafybeibeo7pmvzpkkanwd72q4qu3m4yxex3coufq7uogvcnjwgqzrlpco4", + docID: "bae-fa6a97e9-e0e9-5826-8a8c-57775d35e07c" + ) { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "John", + "points": 10.2, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +// Note: Only the first CID is reproducible given the added entropy to the Counter CRDT type. +func TestCidAndDocIDQuery_ContainsPCounterWithIntKind_NoError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with first cid and docID with pcounter int type", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Int @crdt(type: "pcounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "points": 10 + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "points": 20 + }`, + }, + testUtils.Request{ + Request: `query { + Users ( + cid: "bafybeidtjhrssohan2f5nt7ml3nh4bovpaqhqjvijlpacfednyx77iw5y4", + docID: "bae-a688789e-d8a6-57a7-be09-22e005ab79e0" + ) { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "John", + "points": int64(10), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +// Note: Only the first CID is reproducible given the added entropy to the Counter CRDT type. 
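// The note above is worth unpacking with a concrete illustration. What follows
// is a self-contained sketch (a hypothetical standalone program, not part of
// this patch and not DefraDB's implementation) of why such CIDs cannot be
// hard-coded: a CID is a hash over the encoded block, so once a counter delta
// mixes in a random nonce, equal increments hash to different IDs on every
// run. The note suggests only update deltas carry this entropy, which is why
// the first (create) commit's CID remains assertable. The type and field
// names below are assumptions made for the sketch.

package main

import (
	"crypto/rand"
	"crypto/sha256"
	"encoding/binary"
	"fmt"
)

// counterDelta stands in for a pcounter increment; Nonce is the assumed
// source of entropy added to each update.
type counterDelta struct {
	Increment uint64
	Nonce     [8]byte
}

// contentID mimics content addressing: the ID is a hash over every encoded
// byte, nonce included.
func contentID(d counterDelta) [32]byte {
	buf := make([]byte, 16)
	binary.BigEndian.PutUint64(buf[:8], d.Increment)
	copy(buf[8:], d.Nonce[:])
	return sha256.Sum256(buf)
}

func main() {
	a := counterDelta{Increment: 10}
	b := counterDelta{Increment: 10}
	rand.Read(a.Nonce[:]) // entropy makes otherwise-equal deltas diverge
	rand.Read(b.Nonce[:])
	// Equal increments, different nonces: the IDs differ, so a test can only
	// assert the deterministic first commit's CID.
	fmt.Println(contentID(a) == contentID(b))
}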
+func TestCidAndDocIDQuery_ContainsPCounterWithFloatKind_NoError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with first cid and docID with pcounter and float type", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Float @crdt(type: "pcounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "points": 10.2 + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "points": 20.6 + }`, + }, + testUtils.Request{ + Request: `query { + Users ( + cid: "bafybeieimeijjl4hdvqkt5gkn62j54nlnaetm4te7w4z2mdljlyphfsyji", docID: "bae-fa6a97e9-e0e9-5826-8a8c-57775d35e07c" ) { name diff --git a/tests/integration/query/simple/with_filter/with_in_test.go b/tests/integration/query/simple/with_filter/with_in_test.go index a43f19c37b..7e2aa6df82 100644 --- a/tests/integration/query/simple/with_filter/with_in_test.go +++ b/tests/integration/query/simple/with_filter/with_in_test.go @@ -60,6 +60,47 @@ func TestQuerySimpleWithIntInFilter(t *testing.T) { executeTestCase(t, test) } +func TestQuerySimpleWithIntInFilterOnFloat(t *testing.T) { + test := testUtils.RequestTestCase{ + Description: "Simple query with _in filter on float", + Request: `query { + Users(filter: {HeightM: {_in: [21, 21.2]}}) { + Name + } + }`, + Docs: map[int][]string{ + 0: { + `{ + "Name": "John", + "HeightM": 21.0 + }`, + `{ + "Name": "Bob", + "HeightM": 21.1 + }`, + `{ + "Name": "Carlo", + "HeightM": 21.2 + }`, + `{ + "Name": "Alice", + "HeightM": 21.3 + }`, + }, + }, + Results: []map[string]any{ + { + "Name": "John", + }, + { + "Name": "Carlo", + }, + }, + } + + executeTestCase(t, test) +} + func TestQuerySimpleWithIntInFilterWithNullValue(t *testing.T) { test := testUtils.RequestTestCase{ Description: "Simple query with special filter (or)", diff --git a/tests/integration/query/simple/with_order_test.go b/tests/integration/query/simple/with_order_test.go index f66241d944..1a1f966e60 100644 --- a/tests/integration/query/simple/with_order_test.go +++ b/tests/integration/query/simple/with_order_test.go @@ -22,6 +22,7 @@ func TestQuerySimpleWithEmptyOrder(t *testing.T) { Request: `query { Users(order: {}) { Name + Age } }`, Docs: map[int][]string{ diff --git a/tests/integration/query/simple/with_version_test.go b/tests/integration/query/simple/with_version_test.go index 0f3866f910..615e75a293 100644 --- a/tests/integration/query/simple/with_version_test.go +++ b/tests/integration/query/simple/with_version_test.go @@ -46,14 +46,14 @@ func TestQuerySimpleWithEmbeddedLatestCommit(t *testing.T) { "Age": int64(21), "_version": []map[string]any{ { - "cid": "bafybeicojqe66grk564b2hns3zi6rhquqvugxj6wi4s6xk4e2gg65dzx5e", + "cid": "bafybeigxe467aute545c52e27ll3yun7rpkledh5tbjhxxs2i76dzkfdom", "links": []map[string]any{ { - "cid": "bafybeic45t5rj54wx47fhaqm6dubwt2cf5fkqzwm2nea7ypam3f6s2zbk4", + "cid": "bafybeigdvbqfwrm6dxfnfv4srbue5agzpyzoifl77ix6df7k5pjhat3fwu", "name": "Age", }, { - "cid": "bafybeifkcrogypyaq5iw7krgi5jd26s7jlfsy5u232e7e7y7dqe3wm2hcu", + "cid": "bafybeicurnibuf3b6krgqm3sh2ohmvxiodvawagx2evod573z67xf54zxu", "name": "Name", }, }, @@ -90,7 +90,7 @@ func TestQuerySimpleWithEmbeddedLatestCommitWithSchemaVersionId(t *testing.T) { "Name": "John", "_version": []map[string]any{ { - "schemaVersionId": "bafkreiekkppcdl573ru624wh3kwkmy2nhqzjsvqpu6jv5dgq2kidpnon4u", + "schemaVersionId": "bafkreigqmcqzkbg3elpe24vfza4rjle2r6cxu7ihzvg56aov57crhaebry", }, }, }, @@ -171,14 +171,14 @@ func TestQuerySimpleWithMultipleAliasedEmbeddedLatestCommit(t *testing.T) { "Age": 
int64(21), "_version": []map[string]any{ { - "cid": "bafybeicojqe66grk564b2hns3zi6rhquqvugxj6wi4s6xk4e2gg65dzx5e", + "cid": "bafybeigxe467aute545c52e27ll3yun7rpkledh5tbjhxxs2i76dzkfdom", "L1": []map[string]any{ { - "cid": "bafybeic45t5rj54wx47fhaqm6dubwt2cf5fkqzwm2nea7ypam3f6s2zbk4", + "cid": "bafybeigdvbqfwrm6dxfnfv4srbue5agzpyzoifl77ix6df7k5pjhat3fwu", "name": "Age", }, { - "cid": "bafybeifkcrogypyaq5iw7krgi5jd26s7jlfsy5u232e7e7y7dqe3wm2hcu", + "cid": "bafybeicurnibuf3b6krgqm3sh2ohmvxiodvawagx2evod573z67xf54zxu", "name": "Name", }, }, @@ -242,7 +242,7 @@ func TestQuery_WithAllCommitFields_NoError(t *testing.T) { "_docID": docID, "_version": []map[string]any{ { - "cid": "bafybeicojqe66grk564b2hns3zi6rhquqvugxj6wi4s6xk4e2gg65dzx5e", + "cid": "bafybeigxe467aute545c52e27ll3yun7rpkledh5tbjhxxs2i76dzkfdom", "collectionID": int64(1), "delta": nil, "docID": "bae-52b9170d-b77a-5887-b877-cbdbb99b009f", @@ -251,15 +251,15 @@ func TestQuery_WithAllCommitFields_NoError(t *testing.T) { "height": int64(1), "links": []map[string]any{ { - "cid": "bafybeic45t5rj54wx47fhaqm6dubwt2cf5fkqzwm2nea7ypam3f6s2zbk4", + "cid": "bafybeigdvbqfwrm6dxfnfv4srbue5agzpyzoifl77ix6df7k5pjhat3fwu", "name": "Age", }, { - "cid": "bafybeifkcrogypyaq5iw7krgi5jd26s7jlfsy5u232e7e7y7dqe3wm2hcu", + "cid": "bafybeicurnibuf3b6krgqm3sh2ohmvxiodvawagx2evod573z67xf54zxu", "name": "Name", }, }, - "schemaVersionId": "bafkreiekkppcdl573ru624wh3kwkmy2nhqzjsvqpu6jv5dgq2kidpnon4u", + "schemaVersionId": "bafkreigqmcqzkbg3elpe24vfza4rjle2r6cxu7ihzvg56aov57crhaebry", }, }, }, @@ -321,7 +321,7 @@ func TestQuery_WithAllCommitFieldsWithUpdate_NoError(t *testing.T) { "_docID": docID, "_version": []map[string]any{ { - "cid": "bafybeigcjabzlkuj4j35boczgcl4jmars7gz5a7dfvpq3m344bzth7ebqq", + "cid": "bafybeibpezk2dgdlyavsh3k7vbmgh3iwanqhkzo4byafgytjdv5c7xy73u", "collectionID": int64(1), "delta": nil, "docID": "bae-52b9170d-b77a-5887-b877-cbdbb99b009f", @@ -330,18 +330,18 @@ func TestQuery_WithAllCommitFieldsWithUpdate_NoError(t *testing.T) { "height": int64(2), "links": []map[string]any{ { - "cid": "bafybeihzra5nmcai4omdv2hkplrpexjsau62eaa2ndrf2b7ksxvl7hx3qm", + "cid": "bafybeihidcg4gkm6bnlyyghr5cq5dkn6x5a4l347amy7odsy5rkd7eu4qu", "name": "Age", }, { - "cid": "bafybeicojqe66grk564b2hns3zi6rhquqvugxj6wi4s6xk4e2gg65dzx5e", + "cid": "bafybeigxe467aute545c52e27ll3yun7rpkledh5tbjhxxs2i76dzkfdom", "name": "_head", }, }, - "schemaVersionId": "bafkreiekkppcdl573ru624wh3kwkmy2nhqzjsvqpu6jv5dgq2kidpnon4u", + "schemaVersionId": "bafkreigqmcqzkbg3elpe24vfza4rjle2r6cxu7ihzvg56aov57crhaebry", }, { - "cid": "bafybeicojqe66grk564b2hns3zi6rhquqvugxj6wi4s6xk4e2gg65dzx5e", + "cid": "bafybeigxe467aute545c52e27ll3yun7rpkledh5tbjhxxs2i76dzkfdom", "collectionID": int64(1), "delta": nil, "docID": "bae-52b9170d-b77a-5887-b877-cbdbb99b009f", @@ -350,15 +350,15 @@ func TestQuery_WithAllCommitFieldsWithUpdate_NoError(t *testing.T) { "height": int64(1), "links": []map[string]any{ { - "cid": "bafybeic45t5rj54wx47fhaqm6dubwt2cf5fkqzwm2nea7ypam3f6s2zbk4", + "cid": "bafybeigdvbqfwrm6dxfnfv4srbue5agzpyzoifl77ix6df7k5pjhat3fwu", "name": "Age", }, { - "cid": "bafybeifkcrogypyaq5iw7krgi5jd26s7jlfsy5u232e7e7y7dqe3wm2hcu", + "cid": "bafybeicurnibuf3b6krgqm3sh2ohmvxiodvawagx2evod573z67xf54zxu", "name": "Name", }, }, - "schemaVersionId": "bafkreiekkppcdl573ru624wh3kwkmy2nhqzjsvqpu6jv5dgq2kidpnon4u", + "schemaVersionId": "bafkreigqmcqzkbg3elpe24vfza4rjle2r6cxu7ihzvg56aov57crhaebry", }, }, }, diff --git a/tests/integration/schema/crdt_type_test.go b/tests/integration/schema/crdt_type_test.go index 
073a8e4e83..2a321ef751 100644 --- a/tests/integration/schema/crdt_type_test.go +++ b/tests/integration/schema/crdt_type_test.go @@ -20,7 +20,7 @@ import ( ) func TestSchemaCreate_ContainsPNCounterTypeWithIntKind_NoError(t *testing.T) { - schemaVersionID := "bafkreib2rcnzkjrwabw6kx7qnncfuylugukoosilmb2dct5qylmgec7fdu" + schemaVersionID := "bafkreigsnu67poxm3663e7vl5cncl6pxdzndcc7jf66cnnvxzw5uko5iuu" test := testUtils.TestCase{ Actions: []any{ @@ -59,7 +59,7 @@ func TestSchemaCreate_ContainsPNCounterTypeWithIntKind_NoError(t *testing.T) { } func TestSchemaCreate_ContainsPNCounterTypeWithFloatKind_NoError(t *testing.T) { - schemaVersionID := "bafkreiddz4h2oqi3qzfeqfbjt3wpwrvtm62r4l6uche2nxyullmlmezrsq" + schemaVersionID := "bafkreieflo3tkhsywsqcyzoj6nqgxc6ovv5m5lc7bfbum6yqls5rxlwkye" test := testUtils.TestCase{ Actions: []any{ @@ -130,3 +130,98 @@ func TestSchemaCreate_ContainsPNCounterWithInvalidType_Error(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +func TestSchemaCreate_ContainsPCounterTypeWithIntKind_NoError(t *testing.T) { + schemaVersionID := "bafkreigbmy67fjsys3li5rbs64k3vezvdtbfryc67pxiju4nis7lrbanea" + + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + points: Int @crdt(type: "pcounter") + } + `, + }, + testUtils.GetSchema{ + VersionID: immutable.Some(schemaVersionID), + ExpectedResults: []client.SchemaDescription{ + { + Name: "Users", + VersionID: schemaVersionID, + Root: schemaVersionID, + Fields: []client.SchemaFieldDescription{ + { + Name: "_docID", + Kind: client.FieldKind_DocID, + }, + { + Name: "points", + Kind: client.FieldKind_NILLABLE_INT, + Typ: client.P_COUNTER, + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaCreate_ContainsPCounterTypeWithFloatKind_NoError(t *testing.T) { + schemaVersionID := "bafkreifcyba45ov5zqi6dbhlu72rmf4wp3crjynjvvpq6iuauns2ofbvzi" + + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + points: Float @crdt(type: "pcounter") + } + `, + }, + testUtils.GetSchema{ + VersionID: immutable.Some(schemaVersionID), + ExpectedResults: []client.SchemaDescription{ + { + Name: "Users", + VersionID: schemaVersionID, + Root: schemaVersionID, + Fields: []client.SchemaFieldDescription{ + { + Name: "_docID", + Kind: client.FieldKind_DocID, + }, + { + Name: "points", + Kind: client.FieldKind_NILLABLE_FLOAT, + Typ: client.P_COUNTER, + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaCreate_ContainsPCounterTypeWithWrongKind_Error(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + points: String @crdt(type: "pcounter") + } + `, + ExpectedError: "CRDT type pcounter can't be assigned to field kind String", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/schema/get_schema_test.go b/tests/integration/schema/get_schema_test.go index f809b58627..a89f4a2eb9 100644 --- a/tests/integration/schema/get_schema_test.go +++ b/tests/integration/schema/get_schema_test.go @@ -71,9 +71,9 @@ func TestGetSchema_GivenNoSchemaGivenUnknownName(t *testing.T) { } func TestGetSchema_ReturnsAllSchema(t *testing.T) { - usersSchemaVersion1ID := "bafkreiegrxzoqa3mdgjsfz2vuatbpjbnqxub6yi23dvdumjpt4g3nhiwzq" - usersSchemaVersion2ID := "bafkreidic23paxtc5sannovwkpp6kmpg7xufufz4dgxjsiq2exk2wieh4a" - booksSchemaVersion1ID := "bafkreiakx6sdz3govsorfppdv2pru4fgjzt2qljgjhpkxnkyr7kl4vhdme" + 
usersSchemaVersion1ID := "bafkreia2jn5ecrhtvy4fravk6pm3wqiny46m7mqymvjkgat7xiqupgqoai" + usersSchemaVersion2ID := "bafkreibbsqjeladin2keszmja5kektzgi4eowb6m3oimxssiqge7mmvhva" + booksSchemaVersion1ID := "bafkreibiu34zrehpq346pwp5z24qkderm7ibhnpcqalhkivhnf5e2afqoy" test := testUtils.TestCase{ Actions: []any{ @@ -98,9 +98,9 @@ func TestGetSchema_ReturnsAllSchema(t *testing.T) { testUtils.GetSchema{ ExpectedResults: []client.SchemaDescription{ { - Name: "Books", - Root: booksSchemaVersion1ID, - VersionID: booksSchemaVersion1ID, + Name: "Users", + Root: usersSchemaVersion1ID, + VersionID: usersSchemaVersion1ID, Fields: []client.SchemaFieldDescription{ { Name: "_docID", @@ -126,9 +126,9 @@ func TestGetSchema_ReturnsAllSchema(t *testing.T) { }, }, { - Name: "Users", - Root: usersSchemaVersion1ID, - VersionID: usersSchemaVersion1ID, + Name: "Books", + Root: booksSchemaVersion1ID, + VersionID: booksSchemaVersion1ID, Fields: []client.SchemaFieldDescription{ { Name: "_docID", @@ -145,8 +145,8 @@ func TestGetSchema_ReturnsAllSchema(t *testing.T) { } func TestGetSchema_ReturnsSchemaForGivenRoot(t *testing.T) { - usersSchemaVersion1ID := "bafkreiegrxzoqa3mdgjsfz2vuatbpjbnqxub6yi23dvdumjpt4g3nhiwzq" - usersSchemaVersion2ID := "bafkreidic23paxtc5sannovwkpp6kmpg7xufufz4dgxjsiq2exk2wieh4a" + usersSchemaVersion1ID := "bafkreia2jn5ecrhtvy4fravk6pm3wqiny46m7mqymvjkgat7xiqupgqoai" + usersSchemaVersion2ID := "bafkreibbsqjeladin2keszmja5kektzgi4eowb6m3oimxssiqge7mmvhva" test := testUtils.TestCase{ Actions: []any{ @@ -174,28 +174,28 @@ func TestGetSchema_ReturnsSchemaForGivenRoot(t *testing.T) { { Name: "Users", Root: usersSchemaVersion1ID, - VersionID: usersSchemaVersion2ID, + VersionID: usersSchemaVersion1ID, Fields: []client.SchemaFieldDescription{ { Name: "_docID", Kind: client.FieldKind_DocID, - Typ: client.LWW_REGISTER, - }, - { - Name: "name", - Kind: client.FieldKind_NILLABLE_STRING, - Typ: client.LWW_REGISTER, }, }, }, { Name: "Users", Root: usersSchemaVersion1ID, - VersionID: usersSchemaVersion1ID, + VersionID: usersSchemaVersion2ID, Fields: []client.SchemaFieldDescription{ { Name: "_docID", Kind: client.FieldKind_DocID, + Typ: client.LWW_REGISTER, + }, + { + Name: "name", + Kind: client.FieldKind_NILLABLE_STRING, + Typ: client.LWW_REGISTER, }, }, }, @@ -208,8 +208,8 @@ func TestGetSchema_ReturnsSchemaForGivenRoot(t *testing.T) { } func TestGetSchema_ReturnsSchemaForGivenName(t *testing.T) { - usersSchemaVersion1ID := "bafkreiegrxzoqa3mdgjsfz2vuatbpjbnqxub6yi23dvdumjpt4g3nhiwzq" - usersSchemaVersion2ID := "bafkreidic23paxtc5sannovwkpp6kmpg7xufufz4dgxjsiq2exk2wieh4a" + usersSchemaVersion1ID := "bafkreia2jn5ecrhtvy4fravk6pm3wqiny46m7mqymvjkgat7xiqupgqoai" + usersSchemaVersion2ID := "bafkreibbsqjeladin2keszmja5kektzgi4eowb6m3oimxssiqge7mmvhva" test := testUtils.TestCase{ Actions: []any{ @@ -237,28 +237,28 @@ func TestGetSchema_ReturnsSchemaForGivenName(t *testing.T) { { Name: "Users", Root: usersSchemaVersion1ID, - VersionID: usersSchemaVersion2ID, + VersionID: usersSchemaVersion1ID, Fields: []client.SchemaFieldDescription{ { Name: "_docID", Kind: client.FieldKind_DocID, - Typ: client.LWW_REGISTER, - }, - { - Name: "name", - Kind: client.FieldKind_NILLABLE_STRING, - Typ: client.LWW_REGISTER, }, }, }, { Name: "Users", Root: usersSchemaVersion1ID, - VersionID: usersSchemaVersion1ID, + VersionID: usersSchemaVersion2ID, Fields: []client.SchemaFieldDescription{ { Name: "_docID", Kind: client.FieldKind_DocID, + Typ: client.LWW_REGISTER, + }, + { + Name: "name", + Kind: client.FieldKind_NILLABLE_STRING, + Typ: 
client.LWW_REGISTER, }, }, }, diff --git a/tests/integration/schema/migrations/query/simple_test.go b/tests/integration/schema/migrations/query/simple_test.go index c80b1386dd..a588e70e87 100644 --- a/tests/integration/schema/migrations/query/simple_test.go +++ b/tests/integration/schema/migrations/query/simple_test.go @@ -45,8 +45,8 @@ func TestSchemaMigrationQuery(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -115,8 +115,8 @@ func TestSchemaMigrationQueryMultipleDocs(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -178,8 +178,8 @@ func TestSchemaMigrationQueryWithMigrationRegisteredBeforeSchemaPatch(t *testing }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -254,8 +254,8 @@ func TestSchemaMigrationQueryMigratesToIntermediaryVersion(t *testing.T) { // Register a migration from schema version 1 to schema version 2 **only** - // there should be no migration from version 2 to version 3. LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -325,8 +325,8 @@ func TestSchemaMigrationQueryMigratesFromIntermediaryVersion(t *testing.T) { // Register a migration from schema version 2 to schema version 3 **only** - // there should be no migration from version 1 to version 2. 
LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", - DestinationSchemaVersionID: "bafkreicyyn7ourjvr2o6bqa57z2bl5wz5u2ykdlmd5v7n53cw7l6xsdplm", + SourceSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreib65lld2tdyvlilbumlcccftqwvflpgutugghf5afrnlhdg7dgyv4", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -394,8 +394,8 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersions(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -411,8 +411,8 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersions(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", - DestinationSchemaVersionID: "bafkreicyyn7ourjvr2o6bqa57z2bl5wz5u2ykdlmd5v7n53cw7l6xsdplm", + SourceSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreib65lld2tdyvlilbumlcccftqwvflpgutugghf5afrnlhdg7dgyv4", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -466,8 +466,8 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersionsBeforePatches(t *test }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -483,8 +483,8 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersionsBeforePatches(t *test }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", - DestinationSchemaVersionID: "bafkreicyyn7ourjvr2o6bqa57z2bl5wz5u2ykdlmd5v7n53cw7l6xsdplm", + SourceSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreib65lld2tdyvlilbumlcccftqwvflpgutugghf5afrnlhdg7dgyv4", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -553,8 +553,8 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersionsBeforePatchesWrongOrd testUtils.ConfigureMigration{ // Declare the migration from v2=>v3 before declaring the migration from v1=>v2 LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", - DestinationSchemaVersionID: "bafkreicyyn7ourjvr2o6bqa57z2bl5wz5u2ykdlmd5v7n53cw7l6xsdplm", + SourceSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreib65lld2tdyvlilbumlcccftqwvflpgutugghf5afrnlhdg7dgyv4", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -570,8 +570,8 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersionsBeforePatchesWrongOrd }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - 
SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -712,8 +712,8 @@ func TestSchemaMigrationQueryMigrationMutatesExistingScalarField(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -773,8 +773,8 @@ func TestSchemaMigrationQueryMigrationMutatesExistingInlineArrayField(t *testing }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiasjk4ypvsmdiebxadvhdnpvq4eun6wielebzlcnipyqr357bz7ou", - DestinationSchemaVersionID: "bafkreie7zotytkhmsp7ro5dqyf75fwrafos4xowgatalicbcb3lu5lfade", + SourceSchemaVersionID: "bafkreicn6ltdovb6y7g3ecoptqkvx2y5y5yntrb5uydmg3jiakskqva2ta", + DestinationSchemaVersionID: "bafkreifv4vhz3dw7upc5u3omsqi6klz3h3e54ogfskp72gtut62fuxqrcu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -836,8 +836,8 @@ func TestSchemaMigrationQueryMigrationRemovesExistingField(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiewca6o66mgkpbai2vtrupolvtf66wllbvouvtwo6fkc6alrybzfa", - DestinationSchemaVersionID: "bafkreibqzsrn3acwn7hkakm2ko5i4t5pdarmylvodi5tnpxunfcwmut2ua", + SourceSchemaVersionID: "bafkreihhd6bqrjhl5zidwztgxzeseveplv3cj3fwtn3unjkdx7j2vr2vrq", + DestinationSchemaVersionID: "bafkreiegvk3fkcjxoqqpp7npxqjdjwijiwthvynzmsvtzajpjevgu2krku", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -897,8 +897,8 @@ func TestSchemaMigrationQueryMigrationPreservesExistingFieldWhenFieldNotRequeste }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiewca6o66mgkpbai2vtrupolvtf66wllbvouvtwo6fkc6alrybzfa", - DestinationSchemaVersionID: "bafkreibqzsrn3acwn7hkakm2ko5i4t5pdarmylvodi5tnpxunfcwmut2ua", + SourceSchemaVersionID: "bafkreihhd6bqrjhl5zidwztgxzeseveplv3cj3fwtn3unjkdx7j2vr2vrq", + DestinationSchemaVersionID: "bafkreiegvk3fkcjxoqqpp7npxqjdjwijiwthvynzmsvtzajpjevgu2krku", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -971,8 +971,8 @@ func TestSchemaMigrationQueryMigrationCopiesExistingFieldWhenSrcFieldNotRequeste }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiewca6o66mgkpbai2vtrupolvtf66wllbvouvtwo6fkc6alrybzfa", - DestinationSchemaVersionID: "bafkreicf3nvrorgv2v6czh2lkakibv4me2il5xxytqxfyof7jlmkkdkle4", + SourceSchemaVersionID: "bafkreihhd6bqrjhl5zidwztgxzeseveplv3cj3fwtn3unjkdx7j2vr2vrq", + DestinationSchemaVersionID: "bafkreidgnuvanzqur3pkp4mmrd77ojwvov2rlczraaks4435e6wsgxpwoq", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -1033,8 +1033,8 @@ func TestSchemaMigrationQueryMigrationCopiesExistingFieldWhenSrcAndDstFieldNotRe }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiewca6o66mgkpbai2vtrupolvtf66wllbvouvtwo6fkc6alrybzfa", - 
DestinationSchemaVersionID: "bafkreicf3nvrorgv2v6czh2lkakibv4me2il5xxytqxfyof7jlmkkdkle4", + SourceSchemaVersionID: "bafkreihhd6bqrjhl5zidwztgxzeseveplv3cj3fwtn3unjkdx7j2vr2vrq", + DestinationSchemaVersionID: "bafkreidgnuvanzqur3pkp4mmrd77ojwvov2rlczraaks4435e6wsgxpwoq", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_doc_id_test.go b/tests/integration/schema/migrations/query/with_doc_id_test.go index 3acb7ab890..ee175515dc 100644 --- a/tests/integration/schema/migrations/query/with_doc_id_test.go +++ b/tests/integration/schema/migrations/query/with_doc_id_test.go @@ -52,8 +52,8 @@ func TestSchemaMigrationQueryByDocID(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -158,8 +158,8 @@ func TestSchemaMigrationQueryMultipleQueriesByDocID(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_inverse_test.go b/tests/integration/schema/migrations/query/with_inverse_test.go new file mode 100644 index 0000000000..f436c332c0 --- /dev/null +++ b/tests/integration/schema/migrations/query/with_inverse_test.go @@ -0,0 +1,114 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package query + +import ( + "testing" + + "github.com/lens-vm/lens/host-go/config/model" + + "github.com/sourcenetwork/defradb/client" + testUtils "github.com/sourcenetwork/defradb/tests/integration" + "github.com/sourcenetwork/defradb/tests/lenses" +) + +func TestSchemaMigrationQueryInversesAcrossMultipleVersions(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema migration, inverses across multiple migrated versions", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + age: Int + height: Int + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "verified", "Kind": "Boolean"} } + ] + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "email", "Kind": "String"} } + ] + `, + }, + testUtils.ConfigureMigration{ + LensConfig: client.LensConfig{ + SourceSchemaVersionID: "bafkreicdkt3m6mgwuoix7qyijvwxwtj3dlre4a4c6mdnqbucbndwuxjsvi", + DestinationSchemaVersionID: "bafkreibpaw4dxy6bvmuoyegm7bwxyi24nubozmukemwiour4v62kz5ffuu", + Lens: model.Lens{ + Lenses: []model.LensModule{ + { + Path: lenses.SetDefaultModulePath, + Arguments: map[string]any{ + "dst": "age", + "value": 30, + }, + }, + }, + }, + }, + }, + testUtils.ConfigureMigration{ + LensConfig: client.LensConfig{ + SourceSchemaVersionID: "bafkreibpaw4dxy6bvmuoyegm7bwxyi24nubozmukemwiour4v62kz5ffuu", + DestinationSchemaVersionID: "bafkreickm4zodm2muw5qcctmssht63g57u7kxujqyoax4zb5c42zs4pdh4", + Lens: model.Lens{ + Lenses: []model.LensModule{ + { + Path: lenses.SetDefaultModulePath, + Arguments: map[string]any{ + "dst": "height", + "value": 190, + }, + }, + }, + }, + }, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "age": 33, + "height": 185 + }`, + }, + testUtils.SetActiveSchemaVersion{ + SchemaVersionID: "bafkreicdkt3m6mgwuoix7qyijvwxwtj3dlre4a4c6mdnqbucbndwuxjsvi", + }, + testUtils.Request{ + Request: `query { + Users { + name + age + height + } + }`, + Results: []map[string]any{ + { + "name": "John", + "age": nil, + "height": nil, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/schema/migrations/query/with_p2p_schema_branch_test.go b/tests/integration/schema/migrations/query/with_p2p_schema_branch_test.go new file mode 100644 index 0000000000..b5e7bdde03 --- /dev/null +++ b/tests/integration/schema/migrations/query/with_p2p_schema_branch_test.go @@ -0,0 +1,138 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt.
+ +package query + +import ( + "testing" + + "github.com/lens-vm/lens/host-go/config/model" + "github.com/sourcenetwork/immutable" + + "github.com/sourcenetwork/defradb/client" + testUtils "github.com/sourcenetwork/defradb/tests/integration" + "github.com/sourcenetwork/defradb/tests/lenses" +) + +func TestSchemaMigrationQueryWithP2PReplicatedDocOnOtherSchemaBranch(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + verified: Boolean + } + `, + }, + testUtils.SchemaPatch{ + // Patch first node only + NodeID: immutable.Some(0), + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "email", "Kind": "String"} } + ] + `, + }, + testUtils.ConfigureMigration{ + // Register the migration on both nodes. + LensConfig: client.LensConfig{ + SourceSchemaVersionID: "bafkreibpai5hfnalhtn5mgamzkgml4gwftow7pklmjcn6i4sqey6a5u5ce", + DestinationSchemaVersionID: "bafkreidrbhf54zckhmchzw2ngbobfqtkt7sm6ihbliu2wtxesehz5g4xwm", + Lens: model.Lens{ + Lenses: []model.LensModule{ + { + Path: lenses.SetDefaultModulePath, + Arguments: map[string]any{ + "dst": "name", + "value": "Fred", + }, + }, + }, + }, + }, + }, + testUtils.SchemaPatch{ + // Patch second node with different patch + NodeID: immutable.Some(1), + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "phone", "Kind": 11} } + ] + `, + Lens: immutable.Some(model.Lens{ + Lenses: []model.LensModule{ + { + Path: lenses.SetDefaultModulePath, + Arguments: map[string]any{ + "dst": "phone", + "value": "1234567890", + }, + }, + }, + }), + }, + testUtils.ConfigureReplicator{ + SourceNodeID: 0, + TargetNodeID: 1, + }, + testUtils.CreateDoc{ + // Create John on the first (source) node only, and allow the value to sync + NodeID: immutable.Some(0), + Doc: `{ + "name": "John", + "verified": true + }`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + NodeID: immutable.Some(0), + Request: `query { + Users { + name + verified + } + }`, + Results: []map[string]any{ + { + "name": "John", + "verified": true, + }, + }, + }, + testUtils.Request{ + // Node 1 should yield results migrated down to schema version 1, then up to schema version 3. + NodeID: immutable.Some(1), + Request: ` + query { + Users { + name + phone + verified + } + } + `, + Results: []map[string]any{ + { + // name has been cleared by the inverse of the migration from version 1 to 2 + "name": nil, + // phone has been set by the migration from version 1 to 3 + "phone": "1234567890", + "verified": true, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/schema/migrations/query/with_p2p_test.go b/tests/integration/schema/migrations/query/with_p2p_test.go index 2b22fba89d..f8b0197d5d 100644 --- a/tests/integration/schema/migrations/query/with_p2p_test.go +++ b/tests/integration/schema/migrations/query/with_p2p_test.go @@ -46,8 +46,8 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtOlderSchemaVersion(t *testing testUtils.ConfigureMigration{ // Register the migration on both nodes. 
LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiaqs2jvnjgddkkhxzhhfmrr6o4yohhqymbi55b7ltynxo4tmge4wu", - DestinationSchemaVersionID: "bafkreigc5whyvnmgqvdr6yk366ct4dddgmwnwrnbgbmu4f3edm3sfwerha", + SourceSchemaVersionID: "bafkreibpai5hfnalhtn5mgamzkgml4gwftow7pklmjcn6i4sqey6a5u5ce", + DestinationSchemaVersionID: "bafkreidrbhf54zckhmchzw2ngbobfqtkt7sm6ihbliu2wtxesehz5g4xwm", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -145,8 +145,8 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtMuchOlderSchemaVersion(t *tes testUtils.ConfigureMigration{ // Register the migration on both nodes. LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiaqs2jvnjgddkkhxzhhfmrr6o4yohhqymbi55b7ltynxo4tmge4wu", - DestinationSchemaVersionID: "bafkreigc5whyvnmgqvdr6yk366ct4dddgmwnwrnbgbmu4f3edm3sfwerha", + SourceSchemaVersionID: "bafkreibpai5hfnalhtn5mgamzkgml4gwftow7pklmjcn6i4sqey6a5u5ce", + DestinationSchemaVersionID: "bafkreidrbhf54zckhmchzw2ngbobfqtkt7sm6ihbliu2wtxesehz5g4xwm", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -163,8 +163,8 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtMuchOlderSchemaVersion(t *tes testUtils.ConfigureMigration{ // Register the migration on both nodes. LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreigc5whyvnmgqvdr6yk366ct4dddgmwnwrnbgbmu4f3edm3sfwerha", - DestinationSchemaVersionID: "bafkreidtw4d7bv57wmwwwxkejburwuktc2kiakkmzgiacyy5vl7gj2ih5i", + SourceSchemaVersionID: "bafkreidrbhf54zckhmchzw2ngbobfqtkt7sm6ihbliu2wtxesehz5g4xwm", + DestinationSchemaVersionID: "bafkreidiohu3klvu4f2fdqcywtpqild4v7spsn7ivsjtg6sea6ome2oc4i", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -253,8 +253,8 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtNewerSchemaVersion(t *testing testUtils.ConfigureMigration{ // Register the migration on both nodes. LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiaqs2jvnjgddkkhxzhhfmrr6o4yohhqymbi55b7ltynxo4tmge4wu", - DestinationSchemaVersionID: "bafkreigc5whyvnmgqvdr6yk366ct4dddgmwnwrnbgbmu4f3edm3sfwerha", + SourceSchemaVersionID: "bafkreibpai5hfnalhtn5mgamzkgml4gwftow7pklmjcn6i4sqey6a5u5ce", + DestinationSchemaVersionID: "bafkreidrbhf54zckhmchzw2ngbobfqtkt7sm6ihbliu2wtxesehz5g4xwm", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -355,8 +355,8 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtMuchNewerSchemaVersionWithSch // Register a migration from version 2 to version 3 on both nodes. // There is no migration from version 1 to 2, thus node 1 has no knowledge of schema version 2. 
LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", - DestinationSchemaVersionID: "bafkreicyyn7ourjvr2o6bqa57z2bl5wz5u2ykdlmd5v7n53cw7l6xsdplm", + SourceSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreib65lld2tdyvlilbumlcccftqwvflpgutugghf5afrnlhdg7dgyv4", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_restart_test.go b/tests/integration/schema/migrations/query/with_restart_test.go index 196b5cf57e..f44264312c 100644 --- a/tests/integration/schema/migrations/query/with_restart_test.go +++ b/tests/integration/schema/migrations/query/with_restart_test.go @@ -45,8 +45,8 @@ func TestSchemaMigrationQueryWithRestart(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -99,8 +99,8 @@ func TestSchemaMigrationQueryWithRestartAndMigrationBeforeSchemaPatch(t *testing }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_schema_branch_test.go b/tests/integration/schema/migrations/query/with_schema_branch_test.go new file mode 100644 index 0000000000..0ed9e68aca --- /dev/null +++ b/tests/integration/schema/migrations/query/with_schema_branch_test.go @@ -0,0 +1,107 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package query + +import ( + "testing" + + "github.com/lens-vm/lens/host-go/config/model" + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + "github.com/sourcenetwork/defradb/tests/lenses" +) + +func TestSchemaMigrationQuery_WithBranchingSchema(t *testing.T) { + schemaVersion1ID := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" + + test := testUtils.TestCase{ + Description: "Test schema update, with branching schema migrations", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + SetAsDefaultVersion: immutable.Some(true), + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "email", "Kind": 11} } + ] + `, + Lens: immutable.Some(model.Lens{ + Lenses: []model.LensModule{ + { + Path: lenses.SetDefaultModulePath, + Arguments: map[string]any{ + "dst": "name", + "value": "Fred", + }, + }, + }, + }), + }, + testUtils.CreateDoc{ + // Create a document on the second schema version, with an email field value + Doc: `{ + "name": "John", + "email": "john@source.hub" + }`, + }, + testUtils.SetActiveSchemaVersion{ + // Set the active schema version back to the first + SchemaVersionID: schemaVersion1ID, + }, + testUtils.SchemaPatch{ + // The third schema version will be set as the active version, going from version 1 to 3 + SetAsDefaultVersion: immutable.Some(true), + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "phone", "Kind": 11} } + ] + `, + Lens: immutable.Some(model.Lens{ + Lenses: []model.LensModule{ + { + Path: lenses.SetDefaultModulePath, + Arguments: map[string]any{ + "dst": "phone", + "value": "1234567890", + }, + }, + }, + }), + }, + testUtils.Request{ + Request: ` + query { + Users { + name + phone + } + } + `, + Results: []map[string]any{ + { + // name has been cleared by the inverse of the migration from version 1 to 2 + "name": nil, + // phone has been set by the migration from version 1 to 3 + "phone": "1234567890", + }, + }, + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/schema/migrations/query/with_set_default_test.go b/tests/integration/schema/migrations/query/with_set_default_test.go index d18f2f4092..17c147338c 100644 --- a/tests/integration/schema/migrations/query/with_set_default_test.go +++ b/tests/integration/schema/migrations/query/with_set_default_test.go @@ -22,7 +22,7 @@ import ( ) func TestSchemaMigrationQuery_WithSetDefaultToLatest_AppliesForwardMigration(t *testing.T) { - schemaVersionID2 := "bafkreigc5whyvnmgqvdr6yk366ct4dddgmwnwrnbgbmu4f3edm3sfwerha" + schemaVersionID2 := "bafkreidrbhf54zckhmchzw2ngbobfqtkt7sm6ihbliu2wtxesehz5g4xwm" test := testUtils.TestCase{ Description: "Test schema migration", @@ -83,8 +83,8 @@ func TestSchemaMigrationQuery_WithSetDefaultToLatest_AppliesForwardMigration(t * } func TestSchemaMigrationQuery_WithSetDefaultToOriginal_AppliesInverseMigration(t *testing.T) { - schemaVersionID1 := "bafkreiaqs2jvnjgddkkhxzhhfmrr6o4yohhqymbi55b7ltynxo4tmge4wu" - schemaVersionID2 := "bafkreigc5whyvnmgqvdr6yk366ct4dddgmwnwrnbgbmu4f3edm3sfwerha" + schemaVersionID1 := "bafkreibpai5hfnalhtn5mgamzkgml4gwftow7pklmjcn6i4sqey6a5u5ce" + schemaVersionID2 := "bafkreidrbhf54zckhmchzw2ngbobfqtkt7sm6ihbliu2wtxesehz5g4xwm" test := testUtils.TestCase{ Description: "Test schema migration", @@ -158,8 +158,8 @@ func TestSchemaMigrationQuery_WithSetDefaultToOriginal_AppliesInverseMigration(t } func 
TestSchemaMigrationQuery_WithSetDefaultToOriginalVersionThatDocWasCreatedAt_ClearsMigrations(t *testing.T) { - schemaVersionID1 := "bafkreiaqs2jvnjgddkkhxzhhfmrr6o4yohhqymbi55b7ltynxo4tmge4wu" - schemaVersionID2 := "bafkreigc5whyvnmgqvdr6yk366ct4dddgmwnwrnbgbmu4f3edm3sfwerha" + schemaVersionID1 := "bafkreibpai5hfnalhtn5mgamzkgml4gwftow7pklmjcn6i4sqey6a5u5ce" + schemaVersionID2 := "bafkreidrbhf54zckhmchzw2ngbobfqtkt7sm6ihbliu2wtxesehz5g4xwm" test := testUtils.TestCase{ Description: "Test schema migration", diff --git a/tests/integration/schema/migrations/query/with_txn_test.go b/tests/integration/schema/migrations/query/with_txn_test.go index a4cbba67f8..880f9e01ed 100644 --- a/tests/integration/schema/migrations/query/with_txn_test.go +++ b/tests/integration/schema/migrations/query/with_txn_test.go @@ -47,8 +47,8 @@ func TestSchemaMigrationQueryWithTxn(t *testing.T) { testUtils.ConfigureMigration{ TransactionID: immutable.Some(0), LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -109,8 +109,8 @@ func TestSchemaMigrationQueryWithTxnAndCommit(t *testing.T) { testUtils.ConfigureMigration{ TransactionID: immutable.Some(0), LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_update_test.go b/tests/integration/schema/migrations/query/with_update_test.go index b01c197c46..93a2586e25 100644 --- a/tests/integration/schema/migrations/query/with_update_test.go +++ b/tests/integration/schema/migrations/query/with_update_test.go @@ -45,8 +45,8 @@ func TestSchemaMigrationQueryWithUpdateRequest(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -129,8 +129,8 @@ func TestSchemaMigrationQueryWithMigrationRegisteredAfterUpdate(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/simple_test.go b/tests/integration/schema/migrations/simple_test.go index 
07fa12ca53..a7826f5366 100644 --- a/tests/integration/schema/migrations/simple_test.go +++ b/tests/integration/schema/migrations/simple_test.go @@ -106,8 +106,8 @@ func TestSchemaMigrationGetMigrationsReturnsMultiple(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -154,11 +154,11 @@ func TestSchemaMigrationGetMigrationsReturnsMultiple(t *testing.T) { }, { ID: 3, - SchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", + SchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", }, { ID: 4, - SchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Sources: []any{ &client.CollectionSource{ SourceCollectionID: 3, diff --git a/tests/integration/schema/one_one_test.go b/tests/integration/schema/one_one_test.go new file mode 100644 index 0000000000..b5bc75bb48 --- /dev/null +++ b/tests/integration/schema/one_one_test.go @@ -0,0 +1,61 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package schema + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestSchemaOneOne_NoPrimary_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + dog: Dog + } + type Dog { + name: String + owner: User + } + `, + ExpectedError: "relation missing field. Object: Dog, RelationName: dog_user", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaOneOne_TwoPrimaries_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + dog: Dog @primary + } + type Dog { + name: String + owner: User @primary + } + `, + ExpectedError: "relation can only have a single field set as primary", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/schema/relations_test.go b/tests/integration/schema/relations_test.go index d1b420afb6..892c6e67ac 100644 --- a/tests/integration/schema/relations_test.go +++ b/tests/integration/schema/relations_test.go @@ -23,7 +23,7 @@ func TestSchemaRelationOneToOne(t *testing.T) { Schema: ` type Dog { name: String - user: User + user: User @primary } type User { dog: Dog @@ -135,45 +135,7 @@ func TestSchemaRelationErrorsGivenOneSidedManyRelationField(t *testing.T) { dogs: [Dog] } `, - ExpectedError: "relation must be defined on both schemas. 
Field: dogs, Type: Dog", - }, - }, - } - - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaRelationErrorsGivenOneSidedRelationField(t *testing.T) { - test := testUtils.TestCase{ - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Dog { - name: String - } - type User { - dog: Dog - } - `, - ExpectedError: "relation must be defined on both schemas. Field: dog, Type: Dog", - }, - }, - } - - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaRelation_GivenSelfReferemceRelationField_ReturnError(t *testing.T) { - test := testUtils.TestCase{ - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Dog { - name: String - bestMate: Dog - } - `, - ExpectedError: "relation must be defined on both schemas. Field: bestMate, Type: Dog", + ExpectedError: "relation missing field. Object: Dog, RelationName: dog_user", }, }, } diff --git a/tests/integration/schema/simple_test.go b/tests/integration/schema/simple_test.go index b8ca9c71e7..9e169e6178 100644 --- a/tests/integration/schema/simple_test.go +++ b/tests/integration/schema/simple_test.go @@ -20,7 +20,7 @@ import ( ) func TestSchemaSimpleCreatesSchemaGivenEmptyType(t *testing.T) { - schemaVersionID := "bafkreiegrxzoqa3mdgjsfz2vuatbpjbnqxub6yi23dvdumjpt4g3nhiwzq" + schemaVersionID := "bafkreia2jn5ecrhtvy4fravk6pm3wqiny46m7mqymvjkgat7xiqupgqoai" test := testUtils.TestCase{ Actions: []any{ @@ -180,7 +180,7 @@ func TestSchemaSimpleErrorsGivenTypeWithInvalidFieldType(t *testing.T) { name: NotAType } `, - ExpectedError: "relation must be defined on both schemas. Field: name, Type: NotAType", + ExpectedError: "no type found for given name. Field: name, Kind: NotAType", }, }, } diff --git a/tests/integration/schema/updates/add/field/crdt/pcounter_test.go b/tests/integration/schema/updates/add/field/crdt/pcounter_test.go new file mode 100644 index 0000000000..b7edfe7269 --- /dev/null +++ b/tests/integration/schema/updates/add/field/crdt/pcounter_test.go @@ -0,0 +1,73 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package crdt + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestSchemaUpdates_AddFieldCRDTPCounter_NoError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with crdt P Counter (5)", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": "Int", "Typ": 5} } + ] + `, + }, + testUtils.Request{ + Request: `query { + Users { + name + foo + } + }`, + Results: []map[string]any{}, + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdates_AddFieldCRDTPCounterWithMismatchKind_Error(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with crdt P Counter (5) and mismatched kind", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": "Boolean", "Typ": 5} } + ] + `, + ExpectedError: "CRDT type pcounter can't be assigned to field kind Boolean", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/schema/updates/add/field/crdt/pncounter_test.go b/tests/integration/schema/updates/add/field/crdt/pncounter_test.go index 2664118c0f..e4be1c1df8 100644 --- a/tests/integration/schema/updates/add/field/crdt/pncounter_test.go +++ b/tests/integration/schema/updates/add/field/crdt/pncounter_test.go @@ -30,7 +30,7 @@ func TestSchemaUpdates_AddFieldCRDTPNCounter_NoError(t *testing.T) { testUtils.SchemaPatch{ Patch: ` [ - { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 4, "Typ": 4} } + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": "Int", "Typ": 4} } ] `, }, @@ -62,7 +62,7 @@ func TestSchemaUpdates_AddFieldCRDTPNCounterWithMismatchKind_Error(t *testing.T) testUtils.SchemaPatch{ Patch: ` [ - { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 2, "Typ": 4} } + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": "Boolean", "Typ": 4} } ] `, ExpectedError: "CRDT type pncounter can't be assigned to field kind Boolean", diff --git a/tests/integration/schema/updates/add/field/create_update_test.go b/tests/integration/schema/updates/add/field/create_update_test.go index 0fa756891c..d299b70e7f 100644 --- a/tests/integration/schema/updates/add/field/create_update_test.go +++ b/tests/integration/schema/updates/add/field/create_update_test.go @@ -17,8 +17,8 @@ import ( ) func TestSchemaUpdatesAddFieldWithCreateWithUpdateAfterSchemaUpdateAndVersionJoin(t *testing.T) { - initialSchemaVersionId := "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4" - updatedSchemaVersionId := "bafkreidn4f3i52756wevi3sfpbqzijgy6v24zh565pmvtmpqr4ou52v2q4" + initialSchemaVersionId := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" + updatedSchemaVersionId := "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4" test := testUtils.TestCase{ Description: "Test schema update, add field with update after schema update, version join", @@ -105,8 +105,8 @@ func TestSchemaUpdatesAddFieldWithCreateWithUpdateAfterSchemaUpdateAndVersionJoi } func TestSchemaUpdatesAddFieldWithCreateWithUpdateAfterSchemaUpdateAndCommitQuery(t *testing.T) { - initialSchemaVersionId := "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4" -
updatedSchemaVersionId := "bafkreidn4f3i52756wevi3sfpbqzijgy6v24zh565pmvtmpqr4ou52v2q4" + initialSchemaVersionId := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" + updatedSchemaVersionId := "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4" test := testUtils.TestCase{ Description: "Test schema update, add field with update after schema update, commits query", diff --git a/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go b/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go index 95b19e1a59..abeff648fd 100644 --- a/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go +++ b/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 Democratized Data Foundation +// Copyright 2024 Democratized Data Foundation // // Use of this software is governed by the Business Source License // included in the file licenses/BSL.txt. @@ -11,15 +11,14 @@ package kind import ( - "fmt" "testing" testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestSchemaUpdatesAddFieldKindForeignObjectArray(t *testing.T) { +func TestSchemaUpdatesAddFieldKindForeignObjectArray_UnknownSchema(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17)", + Description: "Test schema update, add field with kind foreign object array, unknown schema", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -31,420 +30,21 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray(t *testing.T) { testUtils.SchemaPatch{ Patch: ` [ - { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 17} } - ] - `, - ExpectedError: "a `Schema` [name] must be provided when adding a new relation field. Field: foo, Kind: 17", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_InvalidSchemaJson(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), invalid schema json", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 17, "Schema": 123} } - ] - `, - ExpectedError: "json: cannot unmarshal number into Go struct field SchemaFieldDescription.Fields.Schema of type string", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_MissingRelationName(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), missing relation name", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 17, "Schema": "Users" - }} - ] - `, - ExpectedError: "missing relation name. 
Field: foo", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_IDFieldMissingKind(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), id field missing kind", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo_id"} } - ] - `, - ExpectedError: "relational id field of invalid kind. Field: foo_id, Expected: ID, Actual: 0", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_IDFieldInvalidKind(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), id field invalid kind", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo_id", "Kind": 2} } - ] - `, - ExpectedError: "relational id field of invalid kind. Field: foo_id, Expected: ID, Actual: Boolean", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_IDFieldMissingRelationName(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), id field missing relation name", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo_id", "Kind": 1} } - ] - `, - ExpectedError: "missing relation name. Field: foo_id", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_OnlyHalfRelationDefined(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), only half relation defined", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }} - ] - `, - ExpectedError: "relation must be defined on both schemas. 
Field: foo, Type: Users", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_NoPrimaryDefined(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), no primary defined", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": 17, "Schema": "Users", "RelationName": "foo" - }} - ] - `, - ExpectedError: "primary side of relation not defined. RelationName: foo", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_PrimaryDefinedOnManySide(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), no primary defined", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": 17, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" - }} - ] - `, - ExpectedError: "cannot set the many side of a relation as primary. 
Field: foobar", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_Succeeds(t *testing.T) { - key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" - - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), valid, functional", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": 17, "Schema": "Users", "RelationName": "foo" - }} - ] - `, - }, - testUtils.Request{ - Request: `mutation { - create_Users(input: {name: "John"}) { - _docID - } - }`, - Results: []map[string]any{ - { - "_docID": key1, - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf(`mutation { - create_Users(input: {name: "Keenan", foo: "%s"}) { - name - foo { - name - } - } - }`, - key1, - ), - Results: []map[string]any{ - { - "name": "Keenan", - "foo": map[string]any{ - "name": "John", - }, - }, - }, - }, - testUtils.Request{ - Request: `query { - Users { - name - foo { - name - } - foobar { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Keenan", - "foo": map[string]any{ - "name": "John", - }, - "foobar": []map[string]any{}, - }, - { - "name": "John", - "foo": nil, - "foobar": []map[string]any{ - { - "name": "Keenan", - }, - }, - }, - }, - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_SinglePrimaryObjectKindSubstitution(t *testing.T) { - key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" - - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), with single object Kind substitution", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": 17, "Schema": "Users", "RelationName": "foo" + "Name": "foo", "Kind": "[Unknown]" }} ] `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf(`{ - "name": "Keenan", - "foo": "%s" - }`, - key1, - ), - }, - testUtils.Request{ - Request: `query { - Users { - name - foo { - name - } - foobar { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Keenan", - "foo": map[string]any{ - "name": "John", - }, - "foobar": []map[string]any{}, - }, - { - "name": "John", - "foo": nil, - "foobar": []map[string]any{ - { - "name": "Keenan", - }, - }, - }, - }, + ExpectedError: "no type found for given name. 
Field: foo, Kind: Unknown", }, }, } testUtils.ExecuteTestCase(t, test) } -func TestSchemaUpdatesAddFieldKindForeignObjectArray_SingleSecondaryObjectKindSubstitution(t *testing.T) { - key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" - +func TestSchemaUpdatesAddFieldKindForeignObjectArray_KnownSchema(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), with single object Kind substitution", + Description: "Test schema update, add field with kind foreign object array, known schema", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -457,413 +57,11 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_SingleSecondaryObjectKindSu Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "[Users]", "Schema": "Users", "RelationName": "foo" + "Name": "foo", "Kind": "[Users]" }} ] `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf(`{ - "name": "Keenan", - "foo_id": "%s" - }`, - key1, - ), - }, - testUtils.Request{ - Request: `query { - Users { - name - foo { - name - } - foobar { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Keenan", - "foo": map[string]any{ - "name": "John", - }, - "foobar": []map[string]any{}, - }, - { - "name": "John", - "foo": nil, - "foobar": []map[string]any{ - { - "name": "Keenan", - }, - }, - }, - }, - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_ObjectKindSubstitution(t *testing.T) { - key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" - - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), with object Kind substitution", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "[Users]", "Schema": "Users", "RelationName": "foo" - }} - ] - `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf(`{ - "name": "Keenan", - "foo": "%s" - }`, - key1, - ), - }, - testUtils.Request{ - Request: `query { - Users { - name - foo { - name - } - foobar { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Keenan", - "foo": map[string]any{ - "name": "John", - }, - "foobar": []map[string]any{}, - }, - { - "name": "John", - "foo": nil, - "foobar": []map[string]any{ - { - "name": "Keenan", - }, - }, - }, - }, - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_ObjectKindSubstitutionWithAutoSchemaValues(t *testing.T) { - key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" - - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array 
(17), with object Kind substitution", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "[Users]", "RelationName": "foo" - }} - ] - `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf(`{ - "name": "Keenan", - "foo": "%s" - }`, - key1, - ), - }, - testUtils.Request{ - Request: `query { - Users { - name - foo { - name - } - foobar { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Keenan", - "foo": map[string]any{ - "name": "John", - }, - "foobar": []map[string]any{}, - }, - { - "name": "John", - "foo": nil, - "foobar": []map[string]any{ - { - "name": "Keenan", - }, - }, - }, - }, - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_PrimaryObjectKindAndSchemaMismatch(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), with Kind and Schema mismatch", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaUpdate{ - Schema: ` - type Dog { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "Schema": "Dog", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "[Users]", "Schema": "Users", "RelationName": "foo" - }} - ] - `, - ExpectedError: "field Kind does not match field Schema. Kind: Users, Schema: Dog", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_SecondaryObjectKindAndSchemaMismatch(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), with Kind and Schema mismatch", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaUpdate{ - Schema: ` - type Dog { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "[Users]", "Schema": "Dog", "RelationName": "foo" - }} - ] - `, - ExpectedError: "field Kind does not match field Schema. 
Kind: [Users], Schema: Dog", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_MissingPrimaryIDField(t *testing.T) { - key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" - - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), with auto id field generation", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "[Users]", "RelationName": "foo" - }} - ] - `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf(`{ - "name": "Keenan", - "foo": "%s" - }`, - key1, - ), - }, - testUtils.Request{ - Request: `query { - Users { - name - foo_id - foo { - name - } - foobar { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Keenan", - "foo_id": key1, - "foo": map[string]any{ - "name": "John", - }, - "foobar": []map[string]any{}, - }, - { - "name": "John", - "foo": nil, - "foo_id": nil, - "foobar": []map[string]any{ - { - "name": "Keenan", - }, - }, - }, - }, - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_MissingPrimaryIDField_DoesNotCreateIdOnManySide(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), with auto id field generation", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "[Users]", "RelationName": "foo" - }} - ] - `, - }, - testUtils.Request{ - Request: `query { - Users { - foobar_id - } - }`, - ExpectedError: `Cannot query field "foobar_id" on type "Users"`, + ExpectedError: "secondary relation fields cannot be defined on the schema. Name: foo", }, }, } diff --git a/tests/integration/schema/updates/add/field/kind/foreign_object_test.go b/tests/integration/schema/updates/add/field/kind/foreign_object_test.go index 525c41d658..56bfbd2131 100644 --- a/tests/integration/schema/updates/add/field/kind/foreign_object_test.go +++ b/tests/integration/schema/updates/add/field/kind/foreign_object_test.go @@ -19,7 +19,7 @@ import ( func TestSchemaUpdatesAddFieldKindForeignObject(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16)", + Description: "Test schema update, add field with kind foreign object", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -34,31 +34,7 @@ func TestSchemaUpdatesAddFieldKindForeignObject(t *testing.T) { { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 16} } ] `, - ExpectedError: "a `Schema` [name] must be provided when adding a new relation field. 
Field: foo, Kind: 16", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObject_InvalidSchemaJson(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), invalid schema json", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 16, "Schema": 123} } - ] - `, - ExpectedError: "json: cannot unmarshal number into Go struct field SchemaFieldDescription.Fields.Schema of type string", + ExpectedError: "no type found for given name. Type: 16", }, }, } @@ -67,33 +43,7 @@ func TestSchemaUpdatesAddFieldKindForeignObject_InvalidSchemaJson(t *testing.T) func TestSchemaUpdatesAddFieldKindForeignObject_UnknownSchema(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), unknown schema", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "Schema": "Unknown" - }} - ] - `, - ExpectedError: "no schema found for given name. Field: foo, Schema: Unknown", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObject_MissingRelationName(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), missing relation name", + Description: "Test schema update, add field with kind foreign object, unknown schema", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -106,11 +56,11 @@ func TestSchemaUpdatesAddFieldKindForeignObject_MissingRelationName(t *testing.T Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "Schema": "Users" + "Name": "foo", "Kind": "Unknown" }} ] `, - ExpectedError: "missing relation name. Field: foo", + ExpectedError: "no type found for given name. 
Field: foo, Kind: Unknown", }, }, } @@ -119,7 +69,7 @@ func TestSchemaUpdatesAddFieldKindForeignObject_MissingRelationName(t *testing.T func TestSchemaUpdatesAddFieldKindForeignObject_IDFieldMissingKind(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), id field missing kind", + Description: "Test schema update, add field with kind foreign object, id field missing kind", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -132,7 +82,7 @@ func TestSchemaUpdatesAddFieldKindForeignObject_IDFieldMissingKind(t *testing.T) Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16,"IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" + "Name": "foo", "Kind": "Users" }}, { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo_id"} } ] @@ -146,7 +96,7 @@ func TestSchemaUpdatesAddFieldKindForeignObject_IDFieldMissingKind(t *testing.T) func TestSchemaUpdatesAddFieldKindForeignObject_IDFieldInvalidKind(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), id field invalid kind", + Description: "Test schema update, add field with kind foreign object, id field invalid kind", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -159,7 +109,7 @@ func TestSchemaUpdatesAddFieldKindForeignObject_IDFieldInvalidKind(t *testing.T) Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" + "Name": "foo", "Kind": "Users" }}, { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo_id", "Kind": 2} } ] @@ -171,134 +121,11 @@ func TestSchemaUpdatesAddFieldKindForeignObject_IDFieldInvalidKind(t *testing.T) testUtils.ExecuteTestCase(t, test) } -func TestSchemaUpdatesAddFieldKindForeignObject_IDFieldMissingRelationName(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), id field missing relation name", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo_id", "Kind": 1} } - ] - `, - ExpectedError: "missing relation name. Field: foo_id", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObject_OnlyHalfRelationDefined(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), only half relation defined", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }} - ] - `, - ExpectedError: "relation must be defined on both schemas. 
Field: foo, Type: Users", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObject_NoPrimaryDefined(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), no primary defined", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": 16, "Schema": "Users", "RelationName": "foo" - }} - ] - `, - ExpectedError: "primary side of relation not defined. RelationName: foo", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObject_BothSidesPrimary(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), both sides primary", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": 16, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar_id", "Kind": 1, "Schema": "Users", "RelationName": "foo" - }} - ] - `, - ExpectedError: "both sides of a relation cannot be primary. 
RelationName: foo", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - func TestSchemaUpdatesAddFieldKindForeignObject_Succeeds(t *testing.T) { key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), valid, functional", + Description: "Test schema update, add field with kind foreign object, valid, functional", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -311,16 +138,10 @@ func TestSchemaUpdatesAddFieldKindForeignObject_Succeeds(t *testing.T) { Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" + "Name": "foo", "Kind": "Users" }}, { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": 16, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar_id", "Kind": 1, "RelationName": "foo" + "Name": "foo_id", "Kind": 1 }} ] `, @@ -364,89 +185,6 @@ func TestSchemaUpdatesAddFieldKindForeignObject_Succeeds(t *testing.T) { foo { name } - foobar { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Keenan", - "foo": map[string]any{ - "name": "John", - }, - "foobar": nil, - }, - { - "name": "John", - "foo": nil, - "foobar": map[string]any{ - "name": "Keenan", - }, - }, - }, - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObject_SinglePrimaryObjectKindSubstitution(t *testing.T) { - key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" - - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), with single object Kind substitution", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": 16, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar_id", "Kind": 1, "RelationName": "foo" - }} - ] - `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf(`{ - "name": "Keenan", - "foo": "%s" - }`, - key1, - ), - }, - testUtils.Request{ - Request: `query { - Users { - name - foo { - name - } - foobar { - name - } } }`, Results: []map[string]any{ @@ -455,14 +193,10 @@ func TestSchemaUpdatesAddFieldKindForeignObject_SinglePrimaryObjectKindSubstitut "foo": map[string]any{ "name": "John", }, - "foobar": nil, }, { "name": "John", "foo": nil, - "foobar": map[string]any{ - "name": "Keenan", - }, }, }, }, @@ -470,441 +204,3 @@ func TestSchemaUpdatesAddFieldKindForeignObject_SinglePrimaryObjectKindSubstitut } testUtils.ExecuteTestCase(t, test) } - -func TestSchemaUpdatesAddFieldKindForeignObject_SingleSecondaryObjectKindSubstitution(t *testing.T) { - key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" - - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), 
with single object Kind substitution", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "Users", "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar_id", "Kind": 1, "RelationName": "foo" - }} - ] - `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf(`{ - "name": "Keenan", - "foo": "%s" - }`, - key1, - ), - }, - testUtils.Request{ - Request: `query { - Users { - name - foo { - name - } - foobar { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Keenan", - "foo": map[string]any{ - "name": "John", - }, - "foobar": nil, - }, - { - "name": "John", - "foo": nil, - "foobar": map[string]any{ - "name": "Keenan", - }, - }, - }, - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObject_ObjectKindSubstitution(t *testing.T) { - key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" - - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), with object Kind substitution", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "Users", "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar_id", "Kind": 1, "RelationName": "foo" - }} - ] - `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf(`{ - "name": "Keenan", - "foo": "%s" - }`, - key1, - ), - }, - testUtils.Request{ - Request: `query { - Users { - name - foo { - name - } - foobar { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Keenan", - "foo": map[string]any{ - "name": "John", - }, - "foobar": nil, - }, - { - "name": "John", - "foo": nil, - "foobar": map[string]any{ - "name": "Keenan", - }, - }, - }, - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObject_ObjectKindSubstitutionWithAutoSchemaValues(t *testing.T) { - key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" - - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), with object Kind substitution", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": 
"foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar_id", "Kind": 1, "RelationName": "foo" - }} - ] - `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf(`{ - "name": "Keenan", - "foo": "%s" - }`, - key1, - ), - }, - testUtils.Request{ - Request: `query { - Users { - name - foo { - name - } - foobar { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Keenan", - "foo": map[string]any{ - "name": "John", - }, - "foobar": nil, - }, - { - "name": "John", - "foo": nil, - "foobar": map[string]any{ - "name": "Keenan", - }, - }, - }, - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObject_ObjectKindAndSchemaMismatch(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), with Kind and Schema mismatch", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaUpdate{ - Schema: ` - type Dog { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "Schema": "Dog", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar_id", "Kind": 1, "RelationName": "foo" - }} - ] - `, - ExpectedError: "field Kind does not match field Schema. 
Kind: Users, Schema: Dog", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObject_MissingPrimaryIDField(t *testing.T) { - key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" - - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), with auto primary ID field creation", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar_id", "Kind": 1, "RelationName": "foo" - }} - ] - `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf(`{ - "name": "Keenan", - "foo": "%s" - }`, - key1, - ), - }, - testUtils.Request{ - Request: `query { - Users { - name - foo { - name - } - foobar { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Keenan", - "foo": map[string]any{ - "name": "John", - }, - "foobar": nil, - }, - { - "name": "John", - "foo": nil, - "foobar": map[string]any{ - "name": "Keenan", - }, - }, - }, - }, - }, - } - - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObject_MissingSecondaryIDField(t *testing.T) { - key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" - - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), with auto secondary ID field creation", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "Users", "RelationName": "foo" - }} - ] - `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf(`{ - "name": "Keenan", - "foo": "%s" - }`, - key1, - ), - }, - testUtils.Request{ - Request: `query { - Users { - name - foo { - name - } - foobar { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Keenan", - "foo": map[string]any{ - "name": "John", - }, - "foobar": nil, - }, - { - "name": "John", - "foo": nil, - "foobar": map[string]any{ - "name": "Keenan", - }, - }, - }, - }, - }, - } - - testUtils.ExecuteTestCase(t, test) -} diff --git a/tests/integration/schema/updates/add/field/kind/invalid_test.go b/tests/integration/schema/updates/add/field/kind/invalid_test.go index b9c6dbbf31..331804a100 100644 --- a/tests/integration/schema/updates/add/field/kind/invalid_test.go +++ b/tests/integration/schema/updates/add/field/kind/invalid_test.go @@ -140,9 +140,9 @@ func TestSchemaUpdatesAddFieldKind198(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestSchemaUpdatesAddFieldKindInvalidSubstitution(t *testing.T) { +func TestSchemaUpdatesAddFieldKindInvalid(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind 
unsupported (198)", + Description: "Test schema update, add field with kind unsupported", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -157,7 +157,7 @@ func TestSchemaUpdatesAddFieldKindInvalidSubstitution(t *testing.T) { { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": "InvalidKind"} } ] `, - ExpectedError: "no type found for given name. Kind: InvalidKind", + ExpectedError: "no type found for given name. Field: foo, Kind: InvalidKind", }, }, } diff --git a/tests/integration/schema/updates/add/field/simple_test.go b/tests/integration/schema/updates/add/field/simple_test.go index c505668325..80aaec32d6 100644 --- a/tests/integration/schema/updates/add/field/simple_test.go +++ b/tests/integration/schema/updates/add/field/simple_test.go @@ -20,8 +20,8 @@ import ( ) func TestSchemaUpdatesAddFieldSimple(t *testing.T) { - schemaVersion1ID := "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4" - schemaVersion2ID := "bafkreidn4f3i52756wevi3sfpbqzijgy6v24zh565pmvtmpqr4ou52v2q4" + schemaVersion1ID := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" + schemaVersion2ID := "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4" test := testUtils.TestCase{ Description: "Test schema update, add field", @@ -115,8 +115,8 @@ func TestSchemaUpdates_AddFieldSimpleDoNotSetDefault_Errors(t *testing.T) { } func TestSchemaUpdates_AddFieldSimpleDoNotSetDefault_VersionIsQueryable(t *testing.T) { - schemaVersion1ID := "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4" - schemaVersion2ID := "bafkreidn4f3i52756wevi3sfpbqzijgy6v24zh565pmvtmpqr4ou52v2q4" + schemaVersion1ID := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" + schemaVersion2ID := "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4" test := testUtils.TestCase{ Description: "Test schema update, add field", diff --git a/tests/integration/schema/updates/index/simple_test.go b/tests/integration/schema/updates/add/field/with_index_test.go similarity index 88% rename from tests/integration/schema/updates/index/simple_test.go rename to tests/integration/schema/updates/add/field/with_index_test.go index fb506ec623..52815789f8 100644 --- a/tests/integration/schema/updates/index/simple_test.go +++ b/tests/integration/schema/updates/add/field/with_index_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 Democratized Data Foundation +// Copyright 2024 Democratized Data Foundation // // Use of this software is governed by the Business Source License // included in the file licenses/BSL.txt. @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. -package index +package field import ( "testing" @@ -16,7 +16,7 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestPatching_ForCollectionWithIndex_StillWorks(t *testing.T) { +func TestSchemaUpdatesAddFieldSimple_WithExistingIndex(t *testing.T) { test := testUtils.TestCase{ Description: "Test patching schema for collection with index still works", Actions: []any{ diff --git a/tests/integration/schema/updates/add/simple_test.go b/tests/integration/schema/updates/add/simple_test.go index 0eac29b49a..88d36680b0 100644 --- a/tests/integration/schema/updates/add/simple_test.go +++ b/tests/integration/schema/updates/add/simple_test.go @@ -33,7 +33,7 @@ func TestSchemaUpdatesAddSimpleErrorsAddingSchema(t *testing.T) { { "op": "add", "path": "/-", "value": {"Name": "books"} } ] `, - ExpectedError: "unknown collection, adding collections via patch is not supported. 
Name: books", + ExpectedError: "adding collections via patch is not supported. Name: books", }, testUtils.Request{ Request: `query { diff --git a/tests/integration/schema/updates/copy/field/simple_test.go b/tests/integration/schema/updates/copy/field/simple_test.go index 5721a9fb8b..a2c631a515 100644 --- a/tests/integration/schema/updates/copy/field/simple_test.go +++ b/tests/integration/schema/updates/copy/field/simple_test.go @@ -154,12 +154,12 @@ func TestSchemaUpdatesCopyFieldAndReplaceNameAndInvalidKindSubstitution(t *testi // re-typing the clone. Patch: ` [ - { "op": "copy", "from": "/Users/Schema/Fields/1", "path": "/Users/Fields/2" }, + { "op": "copy", "from": "/Users/Fields/1", "path": "/Users/Fields/2" }, { "op": "replace", "path": "/Users/Fields/2/Name", "value": "Age" }, { "op": "replace", "path": "/Users/Fields/2/Kind", "value": "NotAValidKind" } ] `, - ExpectedError: "no type found for given name. Kind: NotAValidKind", + ExpectedError: "no type found for given name. Field: Age, Kind: NotAValidKind", }, }, } diff --git a/tests/integration/schema/updates/copy/simple_test.go b/tests/integration/schema/updates/copy/simple_test.go index 206cd49b52..cdda8abaf8 100644 --- a/tests/integration/schema/updates/copy/simple_test.go +++ b/tests/integration/schema/updates/copy/simple_test.go @@ -38,7 +38,7 @@ func TestSchemaUpdatesCopyCollectionWithRemoveIDAndReplaceName(t *testing.T) { { "op": "replace", "path": "/Book/Name", "value": "Book" } ] `, - ExpectedError: "unknown collection, adding collections via patch is not supported. Name: Book", + ExpectedError: "adding collections via patch is not supported. Name: Book", }, }, } diff --git a/tests/integration/schema/updates/move/simple_test.go b/tests/integration/schema/updates/move/simple_test.go index 94ecfcf1bb..9898430e0f 100644 --- a/tests/integration/schema/updates/move/simple_test.go +++ b/tests/integration/schema/updates/move/simple_test.go @@ -17,7 +17,7 @@ import ( ) func TestSchemaUpdatesMoveCollectionDoesNothing(t *testing.T) { - schemaVersionID := "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4" + schemaVersionID := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" test := testUtils.TestCase{ Description: "Test schema update, move collection", diff --git a/tests/integration/schema/updates/remove/fields/simple_test.go b/tests/integration/schema/updates/remove/fields/simple_test.go index ef2ed6f6db..ce9b8112f0 100644 --- a/tests/integration/schema/updates/remove/fields/simple_test.go +++ b/tests/integration/schema/updates/remove/fields/simple_test.go @@ -140,61 +140,3 @@ func TestSchemaUpdatesRemoveFieldTypErrors(t *testing.T) { } testUtils.ExecuteTestCase(t, test) } - -func TestSchemaUpdatesRemoveFieldSchemaErrors(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, remove field Schema", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Author { - name: String - book: [Book] - } - type Book { - name: String - author: Author - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "remove", "path": "/Author/Fields/1/Schema" } - ] - `, - ExpectedError: "mutating an existing field is not supported. 
ProposedName: book", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesRemoveFieldRelationNameErrors(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, remove field RelationName", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Author { - name: String - book: [Book] - } - type Book { - name: String - author: Author - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "remove", "path": "/Author/Fields/1/RelationName" } - ] - `, - ExpectedError: "mutating an existing field is not supported. ProposedName: book", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} diff --git a/tests/integration/schema/updates/replace/simple_test.go b/tests/integration/schema/updates/replace/simple_test.go index 7729a274c9..722ff36f9b 100644 --- a/tests/integration/schema/updates/replace/simple_test.go +++ b/tests/integration/schema/updates/replace/simple_test.go @@ -44,7 +44,7 @@ func TestSchemaUpdatesReplaceCollectionErrors(t *testing.T) { // WARNING: An error is still expected if/when we allow the adding of collections, as this also // implies that the "Users" collection is to be deleted. Only once we support the adding *and* // removal of collections should this not error. - ExpectedError: "unknown collection, adding collections via patch is not supported. Name: Book", + ExpectedError: "adding collections via patch is not supported. Name: Book", }, }, } diff --git a/tests/integration/schema/updates/test/field/simple_test.go b/tests/integration/schema/updates/test/field/simple_test.go index afde980f97..35cba3ec29 100644 --- a/tests/integration/schema/updates/test/field/simple_test.go +++ b/tests/integration/schema/updates/test/field/simple_test.go @@ -102,7 +102,7 @@ func TestSchemaUpdatesTestFieldPasses(t *testing.T) { Patch: ` [ { "op": "test", "path": "/Users/Fields/1", "value": { - "Name": "name", "Kind": 11, "Schema":"", "IsPrimaryRelation":false, "RelationName":"", "Typ":1 + "Name": "name", "Kind": 11, "Typ":1 } } ] `, @@ -127,7 +127,7 @@ func TestSchemaUpdatesTestFieldPasses_UsingFieldNameAsIndex(t *testing.T) { Patch: ` [ { "op": "test", "path": "/Users/Fields/name", "value": { - "Kind": 11, "Schema":"", "IsPrimaryRelation":false, "RelationName":"", "Typ":1 + "Kind": 11, "Typ":1 } } ] `, diff --git a/tests/integration/schema/updates/with_schema_branch_test.go b/tests/integration/schema/updates/with_schema_branch_test.go index e6e6e6e850..d8f7d1afc2 100644 --- a/tests/integration/schema/updates/with_schema_branch_test.go +++ b/tests/integration/schema/updates/with_schema_branch_test.go @@ -20,9 +20,9 @@ import ( ) func TestSchemaUpdates_WithBranchingSchema(t *testing.T) { - schemaVersion1ID := "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4" - schemaVersion2ID := "bafkreidn4f3i52756wevi3sfpbqzijgy6v24zh565pmvtmpqr4ou52v2q4" - schemaVersion3ID := "bafkreieilqyv4bydakul5tbikpysmzwhzvxdau4twcny5n46zvxhkv7oli" + schemaVersion1ID := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" + schemaVersion2ID := "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4" + schemaVersion3ID := "bafkreifswbi23wxvq2zpqnoldolsxk2fhtj5t6rs3pidil3j6tybc62q3m" test := testUtils.TestCase{ Description: "Test schema update, with branching schema", @@ -169,10 +169,10 @@ func TestSchemaUpdates_WithBranchingSchema(t *testing.T) { } func TestSchemaUpdates_WithPatchOnBranchedSchema(t *testing.T) { - schemaVersion1ID := "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4" - schemaVersion2ID := 
"bafkreidn4f3i52756wevi3sfpbqzijgy6v24zh565pmvtmpqr4ou52v2q4" - schemaVersion3ID := "bafkreieilqyv4bydakul5tbikpysmzwhzvxdau4twcny5n46zvxhkv7oli" - schemaVersion4ID := "bafkreicy4llechrh44zwviafs2ptjnr7sloiajjvpp7buaknhwspfevnt4" + schemaVersion1ID := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" + schemaVersion2ID := "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4" + schemaVersion3ID := "bafkreifswbi23wxvq2zpqnoldolsxk2fhtj5t6rs3pidil3j6tybc62q3m" + schemaVersion4ID := "bafkreid4ulxeclzgpzhznge7zdin6docxvklugvr6gt4jxfyanz5i2r2hu" test := testUtils.TestCase{ Description: "Test schema update, with patch on branching schema", @@ -307,9 +307,9 @@ func TestSchemaUpdates_WithPatchOnBranchedSchema(t *testing.T) { } func TestSchemaUpdates_WithBranchingSchemaAndSetActiveSchemaToOtherBranch(t *testing.T) { - schemaVersion1ID := "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4" - schemaVersion2ID := "bafkreidn4f3i52756wevi3sfpbqzijgy6v24zh565pmvtmpqr4ou52v2q4" - schemaVersion3ID := "bafkreieilqyv4bydakul5tbikpysmzwhzvxdau4twcny5n46zvxhkv7oli" + schemaVersion1ID := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" + schemaVersion2ID := "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4" + schemaVersion3ID := "bafkreifswbi23wxvq2zpqnoldolsxk2fhtj5t6rs3pidil3j6tybc62q3m" test := testUtils.TestCase{ Description: "Test schema update, with branching schema toggling between branches", @@ -403,10 +403,10 @@ func TestSchemaUpdates_WithBranchingSchemaAndSetActiveSchemaToOtherBranch(t *tes } func TestSchemaUpdates_WithBranchingSchemaAndSetActiveSchemaToOtherBranchThenPatch(t *testing.T) { - schemaVersion1ID := "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4" - schemaVersion2ID := "bafkreidn4f3i52756wevi3sfpbqzijgy6v24zh565pmvtmpqr4ou52v2q4" - schemaVersion3ID := "bafkreieilqyv4bydakul5tbikpysmzwhzvxdau4twcny5n46zvxhkv7oli" - schemaVersion4ID := "bafkreict4nqhcurfkjskxlek3djpep2acwlfkztughoum4dsvuwigkfqzi" + schemaVersion1ID := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" + schemaVersion2ID := "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4" + schemaVersion3ID := "bafkreifswbi23wxvq2zpqnoldolsxk2fhtj5t6rs3pidil3j6tybc62q3m" + schemaVersion4ID := "bafkreidjuyxhakc5yx7fucunoxijnfjvgqohf4sjoryzf27mqxidh37kne" test := testUtils.TestCase{ Description: "Test schema update, with branching schema toggling between branches then patch", @@ -545,7 +545,7 @@ func TestSchemaUpdates_WithBranchingSchemaAndSetActiveSchemaToOtherBranchThenPat } func TestSchemaUpdates_WithBranchingSchemaAndGetCollectionAtVersion(t *testing.T) { - schemaVersion1ID := "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4" + schemaVersion1ID := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" test := testUtils.TestCase{ Description: `Test schema update, with branching schema toggling between branches and gets the diff --git a/tests/integration/schema/with_update_set_default_test.go b/tests/integration/schema/with_update_set_default_test.go index e5179eb814..f46e0540e3 100644 --- a/tests/integration/schema/with_update_set_default_test.go +++ b/tests/integration/schema/with_update_set_default_test.go @@ -92,7 +92,7 @@ func TestSchema_WithUpdateAndSetDefaultVersionToOriginal_NewFieldIsNotQueriable( SetAsDefaultVersion: immutable.Some(false), }, testUtils.SetActiveSchemaVersion{ - SchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", + SchemaVersionID: 
"bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", }, testUtils.Request{ Request: `query { @@ -129,7 +129,7 @@ func TestSchema_WithUpdateAndSetDefaultVersionToNew_AllowsQueryingOfNewField(t * SetAsDefaultVersion: immutable.Some(false), }, testUtils.SetActiveSchemaVersion{ - SchemaVersionID: "bafkreidn4f3i52756wevi3sfpbqzijgy6v24zh565pmvtmpqr4ou52v2q4", + SchemaVersionID: "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4", }, testUtils.Request{ Request: `query { diff --git a/tests/integration/state.go b/tests/integration/state.go index 25a248413b..49030c82a6 100644 --- a/tests/integration/state.go +++ b/tests/integration/state.go @@ -112,5 +112,6 @@ func newState( collectionNames: collectionNames, documents: [][]*client.Document{}, indexes: [][][]client.IndexDescription{}, + isBench: false, } } diff --git a/tests/integration/test_case.go b/tests/integration/test_case.go index ce6e456fbb..fa1629d0ef 100644 --- a/tests/integration/test_case.go +++ b/tests/integration/test_case.go @@ -38,6 +38,13 @@ type TestCase struct { // This is to only be used in the very rare cases where we really do want behavioural // differences between mutation types, or we need to temporarily document a bug. SupportedMutationTypes immutable.Option[[]MutationType] + + // If provided a value, SupportedClientTypes will limit the client types under test to those + // within this set. If no active clients pass this filter the test will be skipped. + // + // This is to only be used in the very rare cases where we really do want behavioural + // differences between client types, or we need to temporarily document a bug. + SupportedClientTypes immutable.Option[[]ClientType] } // SetupComplete is a flag to explicitly notify the change detector at which point @@ -97,6 +104,18 @@ type SchemaPatch struct { ExpectedError string } +type PatchCollection struct { + // NodeID may hold the ID (index) of a node to apply this patch to. + // + // If a value is not provided the patch will be applied to all nodes. + NodeID immutable.Option[int] + + // The Patch to apply to the collection description. + Patch string + + ExpectedError string +} + // GetSchema is an action that fetches schema using the provided options. type GetSchema struct { // NodeID may hold the ID (index) of a node to apply this patch to. @@ -193,6 +212,14 @@ type CreateDoc struct { // If a value is not provided the document will be created in all nodes. NodeID immutable.Option[int] + // The identity of this request. Optional. + // + // If an Identity is not provided the created document(s) will be public. + // + // If an Identity is provided and the collection has a policy, then the + // created document(s) will be owned by this Identity. + Identity string + // The collection in which this document should be created. CollectionID int @@ -214,6 +241,14 @@ type DeleteDoc struct { // If a value is not provided the document will be created in all nodes. NodeID immutable.Option[int] + // The identity of this request. Optional. + // + // If an Identity is not provided then can only delete public document(s). + // + // If an Identity is provided and the collection has a policy, then + // can also delete private document(s) that are owned by this Identity. + Identity string + // The collection in which this document should be deleted. CollectionID int @@ -239,6 +274,14 @@ type UpdateDoc struct { // If a value is not provided the update will be applied to all nodes. NodeID immutable.Option[int] + // The identity of this request. Optional. 
+ // + // If an Identity is not provided then can only update public document(s). + // + // If an Identity is provided and the collection has a policy, then + // can also update private document(s) that are owned by this Identity. + Identity string + // The collection in which this document exists. CollectionID int @@ -385,6 +428,14 @@ type Request struct { // in which case the expected results must all match across all nodes. NodeID immutable.Option[int] + // The identity of this request. Optional. + // + // If an Identity is not provided then can only operate over public document(s). + // + // If an Identity is provided and the collection has a policy, then can + // operate over private document(s) that are owned by this Identity. + Identity string + // Used to identify the transaction for this to run against. Optional. TransactionID immutable.Option[int] diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index d5cdcbd01d..4821d06b32 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -15,6 +15,7 @@ import ( "fmt" "os" "reflect" + "strconv" "strings" "testing" "time" @@ -22,15 +23,17 @@ import ( "github.com/bxcodec/faker/support/slice" "github.com/fxamacker/cbor/v2" "github.com/libp2p/go-libp2p/core/crypto" + "github.com/sourcenetwork/corelog" "github.com/sourcenetwork/immutable" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" badgerds "github.com/sourcenetwork/defradb/datastore/badger/v4" + "github.com/sourcenetwork/defradb/db" "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/logging" "github.com/sourcenetwork/defradb/net" "github.com/sourcenetwork/defradb/request/graphql" changeDetector "github.com/sourcenetwork/defradb/tests/change_detector" @@ -39,7 +42,10 @@ import ( "github.com/sourcenetwork/defradb/tests/predefined" ) -const mutationTypeEnvName = "DEFRA_MUTATION_TYPE" +const ( + mutationTypeEnvName = "DEFRA_MUTATION_TYPE" + skipNetworkTestsEnvName = "DEFRA_SKIP_NETWORK_TESTS" +) // The MutationType that tests will run using. // @@ -69,8 +75,10 @@ const ( ) var ( - log = logging.MustNewLogger("tests.integration") + log = corelog.NewLogger("tests.integration") mutationType MutationType + // skipNetworkTests will skip any tests that involve network actions + skipNetworkTests = false ) const ( @@ -94,6 +102,9 @@ func init() { // mutation type. mutationType = CollectionSaveMutationType } + if value, ok := os.LookupEnv(skipNetworkTestsEnvName); ok { + skipNetworkTests, _ = strconv.ParseBool(value) + } } // AssertPanic asserts that the code inside the specified PanicTestFunc panics. 
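Reviewer note (not part of the patch): pulling the new test_case.go knobs together, below is a minimal consumer-side sketch of a test that exercises the Identity plumbing and the SupportedClientTypes filter added above. The GoClientType constant name and the identity string are assumed placeholders; the schema here carries no policy, so the Identity only exercises the wiring.

	func TestIdentityPlumbing_SketchOnly(t *testing.T) {
		test := testUtils.TestCase{
			// Assumed constant name; limits the run to a subset of client types.
			SupportedClientTypes: immutable.Some([]testUtils.ClientType{testUtils.GoClientType}),
			Actions: []any{
				testUtils.SchemaUpdate{
					Schema: `type Users { name: String }`,
				},
				testUtils.CreateDoc{
					// Placeholder identity; with a policy in place this doc would be owned by it.
					Identity:     "user1-identity",
					CollectionID: 0,
					Doc:          `{"name": "John"}`,
				},
				testUtils.Request{
					Identity: "user1-identity",
					Request:  `query { Users { name } }`,
					Results:  []map[string]any{{"name": "John"}},
				},
			},
		}
		testUtils.ExecuteTestCase(t, test)
	}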
@@ -130,6 +141,7 @@ func ExecuteTestCase(
 	collectionNames := getCollectionNames(testCase)
 	changeDetector.PreTestChecks(t, collectionNames)
 	skipIfMutationTypeUnsupported(t, testCase.SupportedMutationTypes)
+	skipIfNetworkTest(t, testCase.Actions)
 
 	var clients []ClientType
 	if httpClient {
@@ -158,6 +170,8 @@ func ExecuteTestCase(
 	require.NotEmpty(t, databases)
 	require.NotEmpty(t, clients)
 
+	clients = skipIfClientTypeUnsupported(t, clients, testCase.SupportedClientTypes)
+
 	ctx := context.Background()
 	for _, ct := range clients {
 		for _, dbt := range databases {
@@ -174,18 +188,19 @@ func executeTestCase(
 	dbt DatabaseType,
 	clientType ClientType,
 ) {
-	log.Info(
+	log.InfoContext(
 		ctx,
 		testCase.Description,
-		logging.NewKV("database", dbt),
-		logging.NewKV("client", clientType),
-		logging.NewKV("mutationType", mutationType),
-		logging.NewKV("databaseDir", databaseDir),
-		logging.NewKV("changeDetector.Enabled", changeDetector.Enabled),
-		logging.NewKV("changeDetector.SetupOnly", changeDetector.SetupOnly),
-		logging.NewKV("changeDetector.SourceBranch", changeDetector.SourceBranch),
-		logging.NewKV("changeDetector.TargetBranch", changeDetector.TargetBranch),
-		logging.NewKV("changeDetector.Repository", changeDetector.Repository),
+		corelog.Any("database", dbt),
+		corelog.Any("client", clientType),
+		corelog.Any("mutationType", mutationType),
+		corelog.String("databaseDir", databaseDir),
+		corelog.Bool("skipNetworkTests", skipNetworkTests),
+		corelog.Bool("changeDetector.Enabled", changeDetector.Enabled),
+		corelog.Bool("changeDetector.SetupOnly", changeDetector.SetupOnly),
+		corelog.String("changeDetector.SourceBranch", changeDetector.SourceBranch),
+		corelog.String("changeDetector.TargetBranch", changeDetector.TargetBranch),
+		corelog.String("changeDetector.Repository", changeDetector.Repository),
 	)
 
 	startActionIndex, endActionIndex := getActionRange(t, testCase)
@@ -260,6 +275,9 @@ func performAction(
 	case SchemaPatch:
 		patchSchema(s, action)
 
+	case PatchCollection:
+		patchCollection(s, action)
+
 	case GetSchema:
 		getSchema(s, action)
 
@@ -275,6 +293,9 @@ func performAction(
 	case ConfigureMigration:
 		configureMigration(s, action)
 
+	case AddPolicy:
+		addPolicyACP(s, action)
+
 	case CreateDoc:
 		createDoc(s, action)
 
@@ -777,7 +798,7 @@ func configureNode(
 	n, err = net.NewNode(s.ctx, db, nodeOpts...)
 	require.NoError(s.t, err)
 
-	log.Info(s.ctx, "Starting P2P node", logging.NewKV("P2P address", n.PeerInfo()))
+	log.InfoContext(s.ctx, "Starting P2P node", corelog.Any("P2P address", n.PeerInfo()))
 	if err := n.Start(); err != nil {
 		n.Close()
 		require.NoError(s.t, err)
@@ -822,16 +843,17 @@ func refreshDocuments(
 
 		// We need to add the existing documents in the order in which the test case lists them
 		// otherwise they cannot be referenced correctly by other actions.
-		doc, err := client.NewDocFromJSON([]byte(action.Doc), collection.Schema())
+		doc, err := client.NewDocFromJSON([]byte(action.Doc), collection.Definition())
 		if err != nil {
 			// If an err has been returned, ignore it - it may be expected and if not
 			// the test will fail later anyway
 			continue
 		}
 
+		ctx := db.SetContextIdentity(s.ctx, acpIdentity.New(action.Identity))
+
 		// The document may have been mutated by other actions, so to be sure we have the latest
 		// version without having to worry about the individual update mechanics we fetch it.
-		doc, err = collection.Get(s.ctx, doc.ID(), false)
+		doc, err = collection.Get(ctx, doc.ID(), false)
 		if err != nil {
 			// If an err has been returned, ignore it - it may be expected and if not
 			// the test will fail later anyway
@@ -1005,6 +1027,22 @@ func patchSchema(
 	refreshIndexes(s)
 }
 
+func patchCollection(
+	s *state,
+	action PatchCollection,
+) {
+	for _, node := range getNodes(action.NodeID, s.nodes) {
+		err := node.PatchCollection(s.ctx, action.Patch)
+		expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
+
+		assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
+	}
+
+	// If the schema was updated we need to refresh the collection definitions.
+	refreshCollections(s)
+	refreshIndexes(s)
+}
+
 func getSchema(
 	s *state,
 	action GetSchema,
@@ -1041,8 +1079,9 @@ func getCollections(
 	action GetCollections,
 ) {
 	for _, node := range getNodes(action.NodeID, s.nodes) {
-		db := getStore(s, node, action.TransactionID, "")
-		results, err := db.GetCollections(s.ctx, action.FilterOptions)
+		txn := getTransaction(s, node, action.TransactionID, "")
+		ctx := db.SetContextTxn(s.ctx, txn)
+		results, err := node.GetCollections(ctx, action.FilterOptions)
 		expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
 
 		assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
@@ -1158,12 +1197,17 @@ func createDocViaColSave(
 	collections []client.Collection,
 ) (*client.Document, error) {
 	var err error
-	doc, err := client.NewDocFromJSON([]byte(action.Doc), collections[action.CollectionID].Schema())
+	doc, err := client.NewDocFromJSON([]byte(action.Doc), collections[action.CollectionID].Definition())
 	if err != nil {
 		return nil, err
 	}
 
-	return doc, collections[action.CollectionID].Save(s.ctx, doc)
+	txn := getTransaction(s, node, immutable.None[int](), action.ExpectedError)
+
+	ctx := db.SetContextTxn(s.ctx, txn)
+	ctx = db.SetContextIdentity(ctx, acpIdentity.New(action.Identity))
+
+	return doc, collections[action.CollectionID].Save(ctx, doc)
 }
 
 func createDocViaColCreate(
@@ -1173,12 +1217,17 @@ func createDocViaColCreate(
 	collections []client.Collection,
 ) (*client.Document, error) {
 	var err error
-	doc, err := client.NewDocFromJSON([]byte(action.Doc), collections[action.CollectionID].Schema())
+	doc, err := client.NewDocFromJSON([]byte(action.Doc), collections[action.CollectionID].Definition())
 	if err != nil {
 		return nil, err
 	}
 
-	return doc, collections[action.CollectionID].Create(s.ctx, doc)
+	txn := getTransaction(s, node, immutable.None[int](), action.ExpectedError)
+
+	ctx := db.SetContextTxn(s.ctx, txn)
+	ctx = db.SetContextIdentity(ctx, acpIdentity.New(action.Identity))
+
+	return doc, collections[action.CollectionID].Create(ctx, doc)
 }
 
 func createDocViaGQL(
@@ -1202,9 +1251,15 @@ func createDocViaGQL(
 		input,
 	)
 
-	db := getStore(s, node, immutable.None[int](), action.ExpectedError)
+	txn := getTransaction(s, node, immutable.None[int](), action.ExpectedError)
 
-	result := db.ExecRequest(s.ctx, request)
+	ctx := db.SetContextTxn(s.ctx, txn)
+	ctx = db.SetContextIdentity(ctx, acpIdentity.New(action.Identity))
+
+	result := node.ExecRequest(
+		ctx,
+		request,
+	)
 	if len(result.GQL.Errors) > 0 {
 		return nil, result.GQL.Errors[0]
 	}
@@ -1218,7 +1273,7 @@ func createDocViaGQL(
 	docID, err := client.NewDocIDFromString(docIDString)
 	require.NoError(s.t, err)
 
-	doc, err := collection.Get(s.ctx, docID, false)
+	doc, err := collection.Get(ctx, docID, false)
 	require.NoError(s.t, err)
 
 	return doc, nil
@@ -1231,6 +1286,7 @@ func deleteDoc(
 	action DeleteDoc,
 ) {
 	doc := s.documents[action.CollectionID][action.DocID]
+	ctx := db.SetContextIdentity(s.ctx, acpIdentity.New(action.Identity))
 
 	var expectedErrorRaised bool
 	actionNodes := getNodes(action.NodeID, s.nodes)
@@ -1239,7 +1295,7 @@ func deleteDoc(
 			actionNodes,
 			nodeID,
 			func() error {
-				_, err := collections[action.CollectionID].DeleteWithDocID(s.ctx, doc.ID())
+				_, err := collections[action.CollectionID].Delete(ctx, doc.ID())
 				return err
 			},
 		)
@@ -1288,8 +1344,9 @@ func updateDocViaColSave(
 	collections []client.Collection,
 ) error {
 	cachedDoc := s.documents[action.CollectionID][action.DocID]
+	ctx := db.SetContextIdentity(s.ctx, acpIdentity.New(action.Identity))
 
-	doc, err := collections[action.CollectionID].Get(s.ctx, cachedDoc.ID(), true)
+	doc, err := collections[action.CollectionID].Get(ctx, cachedDoc.ID(), true)
 	if err != nil {
 		return err
 	}
@@ -1301,7 +1358,10 @@ func updateDocViaColSave(
 
 	s.documents[action.CollectionID][action.DocID] = doc
 
-	return collections[action.CollectionID].Save(s.ctx, doc)
+	return collections[action.CollectionID].Save(
+		ctx,
+		doc,
+	)
 }
 
 func updateDocViaColUpdate(
@@ -1311,8 +1371,9 @@ func updateDocViaColUpdate(
 	collections []client.Collection,
 ) error {
 	cachedDoc := s.documents[action.CollectionID][action.DocID]
+	ctx := db.SetContextIdentity(s.ctx, acpIdentity.New(action.Identity))
 
-	doc, err := collections[action.CollectionID].Get(s.ctx, cachedDoc.ID(), true)
+	doc, err := collections[action.CollectionID].Get(ctx, cachedDoc.ID(), true)
 	if err != nil {
 		return err
 	}
@@ -1324,7 +1385,7 @@ func updateDocViaColUpdate(
 
 	s.documents[action.CollectionID][action.DocID] = doc
 
-	return collections[action.CollectionID].Update(s.ctx, doc)
+	return collections[action.CollectionID].Update(ctx, doc)
 }
 
 func updateDocViaGQL(
@@ -1350,9 +1411,12 @@ func updateDocViaGQL(
 		input,
 	)
 
-	db := getStore(s, node, immutable.None[int](), action.ExpectedError)
+	txn := getTransaction(s, node, immutable.None[int](), action.ExpectedError)
 
-	result := db.ExecRequest(s.ctx, request)
+	ctx := db.SetContextTxn(s.ctx, txn)
+	ctx = db.SetContextIdentity(ctx, acpIdentity.New(action.Identity))
+
+	result := node.ExecRequest(ctx, request)
 	if len(result.GQL.Errors) > 0 {
 		return result.GQL.Errors[0]
 	}
@@ -1511,14 +1575,14 @@ func withRetry(
 	return nil
 }
 
-func getStore(
+func getTransaction(
 	s *state,
 	db client.DB,
 	transactionSpecifier immutable.Option[int],
 	expectedError string,
-) client.Store {
+) datastore.Txn {
 	if !transactionSpecifier.HasValue() {
-		return db
+		return nil
 	}
 
 	transactionID := transactionSpecifier.Value()
@@ -1539,7 +1603,7 @@ func getStore(
 		s.txns[transactionID] = txn
 	}
 
-	return db.WithTxn(s.txns[transactionID])
+	return s.txns[transactionID]
 }
 
 // commitTransaction commits the given transaction.
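Reviewer note (not part of the patch): the getStore-to-getTransaction change above recurs throughout this file, so a compact sketch of the resulting call pattern may help. Every call used here (SetContextTxn, SetContextIdentity, acpIdentity.New, Collection.Get) appears in the diff itself; only the helper name and its use as a free function are illustrative.

	// fetchAs decorates the base context with an optional transaction and an
	// acting identity before reading a document, mirroring the harness changes.
	func fetchAs(
		ctx context.Context,
		col client.Collection,
		txn datastore.Txn, // may be nil, matching getTransaction's no-txn result
		identity string, // an empty identity means a public request
		docID client.DocID,
	) (*client.Document, error) {
		ctx = db.SetContextTxn(ctx, txn)
		ctx = db.SetContextIdentity(ctx, acpIdentity.New(identity))
		return col.Get(ctx, docID, false)
	}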
@@ -1567,8 +1631,12 @@ func executeRequest(
 ) {
 	var expectedErrorRaised bool
 	for nodeID, node := range getNodes(action.NodeID, s.nodes) {
-		db := getStore(s, node, action.TransactionID, action.ExpectedError)
-		result := db.ExecRequest(s.ctx, action.Request)
+		txn := getTransaction(s, node, action.TransactionID, action.ExpectedError)
+
+		ctx := db.SetContextTxn(s.ctx, txn)
+		ctx = db.SetContextIdentity(ctx, acpIdentity.New(action.Identity))
+
+		result := node.ExecRequest(ctx, action.Request)
 
 		anyOfByFieldKey := map[docFieldKey][]any{}
 		expectedErrorRaised = assertRequestResults(
@@ -1672,7 +1740,8 @@ func AssertError(t *testing.T, description string, err error, expectedError stri
 		return false
 	} else {
 		if !strings.Contains(err.Error(), expectedError) {
-			assert.ErrorIs(t, err, errors.New(expectedError))
+			// Must be require instead of assert, otherwise will show a fake "error not raised".
+			require.ErrorIs(t, err, errors.New(expectedError))
 			return false
 		}
 		return true
@@ -1736,7 +1805,7 @@ func assertRequestResults(
 		return true
 	}
 
-	log.Info(s.ctx, "", logging.NewKV("RequestResults", result.Data))
+	log.InfoContext(s.ctx, "", corelog.Any("RequestResults", result.Data))
 
 	// compare results
 	require.Equal(s.t, len(expectedResults), len(resultantData),
@@ -1744,6 +1813,18 @@ func assertRequestResults(
 
 	for docIndex, result := range resultantData {
 		expectedResult := expectedResults[docIndex]
+
+		require.Equal(
+			s.t,
+			len(expectedResult),
+			len(result),
+			fmt.Sprintf(
+				"%s \n(number of properties for item at index %v don't match)",
+				s.testCase.Description,
+				docIndex,
+			),
+		)
+
 		for field, actualValue := range result {
 			expectedValue := expectedResult[field]
 
@@ -1915,6 +1996,51 @@ func skipIfMutationTypeUnsupported(t *testing.T, supportedMutationTypes immutabl
 	}
 }
 
+// skipIfClientTypeUnsupported returns a new set of client types that match the given supported set.
+//
+// If supportedClientTypes is none no filtering will take place and the input client set will be returned.
+// If the resultant filtered set is empty the test will be skipped.
+func skipIfClientTypeUnsupported(
+	t *testing.T,
+	clients []ClientType,
+	supportedClientTypes immutable.Option[[]ClientType],
+) []ClientType {
+	if !supportedClientTypes.HasValue() {
+		return clients
+	}
+
+	filteredClients := []ClientType{}
+	for _, supportedMutationType := range supportedClientTypes.Value() {
+		for _, client := range clients {
+			if supportedMutationType == client {
+				filteredClients = append(filteredClients, client)
+				break
+			}
+		}
+	}
+
+	if len(filteredClients) == 0 {
+		t.Skipf("test does not support any given client type. Type: %v", supportedClientTypes)
+	}
+
+	return filteredClients
+}
+
+// skipIfNetworkTest skips the current test if the given actions
+// contain network actions and skipNetworkTests is true.
+func skipIfNetworkTest(t *testing.T, actions []any) {
+	hasNetworkAction := false
+	for _, act := range actions {
+		switch act.(type) {
+		case ConfigureNode:
+			hasNetworkAction = true
+		}
+	}
+	if skipNetworkTests && hasNetworkAction {
+		t.Skip("test involves network actions")
+	}
+}
+
 func ParseSDL(gqlSDL string) (map[string]client.CollectionDefinition, error) {
 	parser, err := graphql.NewParser()
 	if err != nil {
diff --git a/tests/integration/view/one_to_many/simple_test.go b/tests/integration/view/one_to_many/simple_test.go
index f6ccd699b8..30f76987a2 100644
--- a/tests/integration/view/one_to_many/simple_test.go
+++ b/tests/integration/view/one_to_many/simple_test.go
@@ -122,7 +122,7 @@ func TestView_OneToManyWithMixedSDL_Errors(t *testing.T) {
 					books: [Book]
 				}
 			`,
-			ExpectedError: "relation must be defined on both schemas. Field: books, Type: Book",
+			ExpectedError: "relation missing field. Object: Book, RelationName: authorview_book",
 		},
 	},
 }
@@ -457,46 +457,3 @@ func TestView_OneToManyWithDoubleSidedRelation_Errors(t *testing.T) {
 
 	testUtils.ExecuteTestCase(t, test)
 }
-
-func TestView_OneToManyViewOfView(t *testing.T) {
-	test := testUtils.TestCase{
-		Description: "One to many view of view",
-		Actions: []any{
-			testUtils.SchemaUpdate{
-				Schema: `
-					type Author {
-						name: String
-						books: [Book]
-					}
-					type Book {
-						name: String
-						author: Author
-					}
-				`,
-			},
-			testUtils.CreateView{
-				Query: `
-					Author {
-						name
-						books {
-							name
-						}
-					}
-				`,
-				SDL: `
-					type AuthorView {
-						name: String
-						books: [BookView]
-					}
-					interface BookView {
-						name: String
-						author: AuthorView
-					}
-				`,
-				ExpectedError: "relations in views must only be defined on one schema",
-			},
-		},
-	}
-
-	testUtils.ExecuteTestCase(t, test)
-}
diff --git a/tests/integration/view/one_to_one/with_transform_test.go b/tests/integration/view/one_to_one/with_transform_test.go
index cc638596e0..e6da410ee1 100644
--- a/tests/integration/view/one_to_one/with_transform_test.go
+++ b/tests/integration/view/one_to_one/with_transform_test.go
@@ -32,7 +32,7 @@ func TestView_OneToOneWithTransformOnOuter(t *testing.T) {
 				}
 				type Book {
 					name: String
-					author: Author
+					author: Author @primary
 				}
 			`,
 		},
diff --git a/tests/integration/view/simple/with_filter_test.go b/tests/integration/view/simple/with_filter_test.go
index 07b0e130ed..a600a84729 100644
--- a/tests/integration/view/simple/with_filter_test.go
+++ b/tests/integration/view/simple/with_filter_test.go
@@ -118,6 +118,7 @@ func TestView_SimpleWithFilterOnViewAndQuery(t *testing.T) {
 				query {
 					UserView(filter: {age: {_eq: 31}}) {
 						name
+						age
 					}
 				}
 			`,
diff --git a/tests/predefined/gen_predefined.go b/tests/predefined/gen_predefined.go
index 8252156e55..34d575098e 100644
--- a/tests/predefined/gen_predefined.go
+++ b/tests/predefined/gen_predefined.go
@@ -109,7 +109,7 @@ type docGenerator struct {
 
 // It doesn't not modify the original doc.
 func toRequestedDoc(doc map[string]any, typeDef *client.CollectionDefinition) map[string]any {
 	result := make(map[string]any)
-	for _, field := range typeDef.Schema.Fields {
+	for _, field := range typeDef.GetFields() {
 		if field.IsRelation() || field.Name == request.DocIDFieldName {
 			continue
 		}
@@ -131,17 +131,17 @@ func (this *docGenerator) generatePrimary(
 ) (map[string]any, []gen.GeneratedDoc, error) {
 	result := []gen.GeneratedDoc{}
 	requestedSecondary := toRequestedDoc(secDocMap, secType)
-	for _, secDocField := range secType.Schema.Fields {
+	for _, secDocField := range secType.GetFields() {
 		if secDocField.IsRelation() {
 			if secDocMapField, hasField := secDocMap[secDocField.Name]; hasField {
 				if secDocField.IsPrimaryRelation {
-					primType := this.types[secDocField.Schema]
+					primType := this.types[secDocField.Kind.Underlying()]
 					primDocMap, subResult, err := this.generatePrimary(
 						secDocMap[secDocField.Name].(map[string]any), &primType)
 					if err != nil {
 						return nil, nil, NewErrFailedToGenerateDoc(err)
 					}
-					primDoc, err := client.NewDocFromMap(primDocMap, primType.Schema)
+					primDoc, err := client.NewDocFromMap(primDocMap, primType)
 					if err != nil {
 						return nil, nil, NewErrFailedToGenerateDoc(err)
 					}
@@ -174,7 +174,7 @@ func (this *docGenerator) generateRelatedDocs(docMap map[string]any, typeName st
 	if err != nil {
 		return nil, err
 	}
-	doc, err := client.NewDocFromMap(requested, typeDef.Schema)
+	doc, err := client.NewDocFromMap(requested, typeDef)
 	if err != nil {
 		return nil, NewErrFailedToGenerateDoc(err)
 	}
@@ -196,11 +196,11 @@ func (this *docGenerator) generateSecondaryDocs(
 	parentTypeName string,
 ) ([]gen.GeneratedDoc, error) {
 	result := []gen.GeneratedDoc{}
-	for _, field := range primaryType.Schema.Fields {
+	for _, field := range primaryType.GetFields() {
 		if field.IsRelation() {
 			if _, hasProp := primaryDocMap[field.Name]; hasProp {
 				if !field.IsPrimaryRelation &&
-					(parentTypeName == "" || parentTypeName != field.Schema) {
+					(parentTypeName == "" || parentTypeName != field.Kind.Underlying()) {
 					docs, err := this.generateSecondaryDocsForField(
 						primaryDocMap, primaryType.Description.Name.Value(), &field, docID)
 					if err != nil {
@@ -218,14 +218,14 @@ func (this *docGenerator) generateSecondaryDocs(
 func (this *docGenerator) generateSecondaryDocsForField(
 	primaryDoc map[string]any,
 	primaryTypeName string,
-	relField *client.SchemaFieldDescription,
+	relField *client.FieldDefinition,
 	primaryDocID string,
 ) ([]gen.GeneratedDoc, error) {
 	result := []gen.GeneratedDoc{}
-	relTypeDef := this.types[relField.Schema]
+	relTypeDef := this.types[relField.Kind.Underlying()]
 	primaryPropName := ""
-	for _, relDocField := range relTypeDef.Schema.Fields {
-		if relDocField.Schema == primaryTypeName && relDocField.IsPrimaryRelation {
+	for _, relDocField := range relTypeDef.GetFields() {
+		if relDocField.Kind.Underlying() == primaryTypeName && relDocField.IsPrimaryRelation {
 			primaryPropName = relDocField.Name + request.RelatedObjectID
 			switch relVal := primaryDoc[relField.Name].(type) {
 			case []map[string]any:
diff --git a/tests/predefined/gen_predefined_test.go b/tests/predefined/gen_predefined_test.go
index c5e863a51c..30cd446697 100644
--- a/tests/predefined/gen_predefined_test.go
+++ b/tests/predefined/gen_predefined_test.go
@@ -39,7 +39,7 @@ func TestGeneratePredefinedFromSchema_Simple(t *testing.T) {
 	colDefMap, err := parseSDL(schema)
 	require.NoError(t, err)
 
-	errorMsg := assertDocs(mustAddDocIDsToDocs(docsList.Docs, colDefMap["User"].Schema), docs)
+	errorMsg := assertDocs(mustAddDocIDsToDocs(docsList.Docs, colDefMap["User"]), docs)
 	if errorMsg != "" {
 		t.Error(errorMsg)
 	}
@@ -66,7 +66,7 @@ func TestGeneratePredefinedFromSchema_StripExcessiveFields(t *testing.T) {
 	errorMsg := assertDocs(mustAddDocIDsToDocs([]map[string]any{
 		{"name": "John"},
 		{"name": "Fred"},
-	}, colDefMap["User"].Schema), docs)
+	}, colDefMap["User"]), docs)
 	if errorMsg != "" {
 		t.Error(errorMsg)
 	}
@@ -80,7 +80,7 @@ func TestGeneratePredefinedFromSchema_OneToOne(t *testing.T) {
 		}
 		type Device {
 			model: String
-			owner: User
+			owner: User @primary
 		}`
 
 	docs, err := CreateFromSDL(schema, DocsList{
@@ -108,18 +108,18 @@ func TestGeneratePredefinedFromSchema_OneToOne(t *testing.T) {
 	userDocs := mustAddDocIDsToDocs([]map[string]any{
 		{"name": "John"},
 		{"name": "Fred"},
-	}, colDefMap["User"].Schema)
+	}, colDefMap["User"])
 
 	deviceDocs := mustAddDocIDsToDocs([]map[string]any{
 		{
 			"model":    "iPhone",
-			"owner_id": mustGetDocIDFromDocMap(map[string]any{"name": "John"}, colDefMap["User"].Schema),
+			"owner_id": mustGetDocIDFromDocMap(map[string]any{"name": "John"}, colDefMap["User"]),
 		},
 		{
 			"model":    "MacBook",
-			"owner_id": mustGetDocIDFromDocMap(map[string]any{"name": "Fred"}, colDefMap["User"].Schema),
+			"owner_id": mustGetDocIDFromDocMap(map[string]any{"name": "Fred"}, colDefMap["User"]),
 		},
-	}, colDefMap["Device"].Schema)
+	}, colDefMap["Device"])
 
 	errorMsg := assertDocs(append(userDocs, deviceDocs...), docs)
 	if errorMsg != "" {
@@ -163,17 +163,17 @@ func TestGeneratePredefinedFromSchema_OneToOnePrimary(t *testing.T) {
 	userDocs := mustAddDocIDsToDocs([]map[string]any{
 		{
 			"name":      "John",
-			"device_id": mustGetDocIDFromDocMap(map[string]any{"model": "iPhone"}, colDefMap["Device"].Schema),
+			"device_id": mustGetDocIDFromDocMap(map[string]any{"model": "iPhone"}, colDefMap["Device"]),
 		},
 		{
 			"name":      "Fred",
-			"device_id": mustGetDocIDFromDocMap(map[string]any{"model": "MacBook"}, colDefMap["Device"].Schema),
+			"device_id": mustGetDocIDFromDocMap(map[string]any{"model": "MacBook"}, colDefMap["Device"]),
 		},
-	}, colDefMap["User"].Schema)
+	}, colDefMap["User"])
 	deviceDocs := mustAddDocIDsToDocs([]map[string]any{
 		{"model": "iPhone"},
 		{"model": "MacBook"},
-	}, colDefMap["Device"].Schema)
+	}, colDefMap["Device"])
 
 	errorMsg := assertDocs(append(userDocs, deviceDocs...), docs)
 	if errorMsg != "" {
@@ -216,15 +216,15 @@ func TestGeneratePredefinedFromSchema_OneToOneToOnePrimary(t *testing.T) {
 	colDefMap, err := parseSDL(schema)
 	require.NoError(t, err)
 
-	specsDoc := mustAddDocIDToDoc(map[string]any{"OS": "iOS"}, colDefMap["Specs"].Schema)
+	specsDoc := mustAddDocIDToDoc(map[string]any{"OS": "iOS"}, colDefMap["Specs"])
 	deviceDoc := mustAddDocIDToDoc(map[string]any{
 		"model":    "iPhone",
 		"specs_id": specsDoc[request.DocIDFieldName],
-	}, colDefMap["Device"].Schema)
+	}, colDefMap["Device"])
 	userDoc := mustAddDocIDToDoc(map[string]any{
 		"name":      "John",
 		"device_id": deviceDoc[request.DocIDFieldName],
-	}, colDefMap["User"].Schema)
+	}, colDefMap["User"])
 
 	errorMsg := assertDocs([]map[string]any{userDoc, deviceDoc, specsDoc}, docs)
 	if errorMsg != "" {
@@ -267,13 +267,13 @@ func TestGeneratePredefinedFromSchema_TwoPrimaryToOneMiddle(t *testing.T) {
 	colDefMap, err := parseSDL(schema)
 	require.NoError(t, err)
 
-	specsDoc := mustAddDocIDToDoc(map[string]any{"OS": "iOS"}, colDefMap["Specs"].Schema)
-	userDoc := mustAddDocIDToDoc(map[string]any{"name": "John"}, colDefMap["User"].Schema)
+	specsDoc := mustAddDocIDToDoc(map[string]any{"OS": "iOS"}, colDefMap["Specs"])
+	userDoc := mustAddDocIDToDoc(map[string]any{"name": "John"}, colDefMap["User"])
 	deviceDoc := mustAddDocIDToDoc(map[string]any{
 		"model":    "iPhone",
 		"specs_id": specsDoc[request.DocIDFieldName],
 		"owner_id": userDoc[request.DocIDFieldName],
-	}, colDefMap["Device"].Schema)
+	}, colDefMap["Device"])
 
 	errorMsg := assertDocs([]map[string]any{userDoc, deviceDoc, specsDoc}, docs)
 	if errorMsg != "" {
@@ -316,15 +316,15 @@ func TestGeneratePredefinedFromSchema_OneToTwoPrimary(t *testing.T) {
 	colDefMap, err := parseSDL(schema)
 	require.NoError(t, err)
 
-	deviceDoc := mustAddDocIDToDoc(map[string]any{"model": "iPhone"}, colDefMap["Device"].Schema)
+	deviceDoc := mustAddDocIDToDoc(map[string]any{"model": "iPhone"}, colDefMap["Device"])
 	specsDoc := mustAddDocIDToDoc(map[string]any{
 		"OS":        "iOS",
 		"device_id": deviceDoc[request.DocIDFieldName],
-	}, colDefMap["Specs"].Schema)
+	}, colDefMap["Specs"])
 	userDoc := mustAddDocIDToDoc(map[string]any{
 		"name":      "John",
 		"device_id": deviceDoc[request.DocIDFieldName],
-	}, colDefMap["User"].Schema)
+	}, colDefMap["User"])
 
 	errorMsg := assertDocs([]map[string]any{userDoc, deviceDoc, specsDoc}, docs)
 	if errorMsg != "" {
@@ -367,13 +367,13 @@ func TestGeneratePredefinedFromSchema_TwoPrimaryToOneRoot(t *testing.T) {
 	colDefMap, err := parseSDL(schema)
 	require.NoError(t, err)
 
-	deviceDoc := mustAddDocIDToDoc(map[string]any{"model": "iPhone"}, colDefMap["Device"].Schema)
-	addressDoc := mustAddDocIDToDoc(map[string]any{"street": "Backer"}, colDefMap["Address"].Schema)
+	deviceDoc := mustAddDocIDToDoc(map[string]any{"model": "iPhone"}, colDefMap["Device"])
+	addressDoc := mustAddDocIDToDoc(map[string]any{"street": "Backer"}, colDefMap["Address"])
 	userDoc := mustAddDocIDToDoc(map[string]any{
 		"name":       "John",
 		"device_id":  deviceDoc[request.DocIDFieldName],
 		"address_id": addressDoc[request.DocIDFieldName],
-	}, colDefMap["User"].Schema)
+	}, colDefMap["User"])
 
 	errorMsg := assertDocs([]map[string]any{userDoc, deviceDoc, addressDoc}, docs)
 	if errorMsg != "" {
diff --git a/tests/predefined/util_test.go b/tests/predefined/util_test.go
index f155062503..0160470b53 100644
--- a/tests/predefined/util_test.go
+++ b/tests/predefined/util_test.go
@@ -68,22 +68,22 @@ outer:
 	return ""
 }
 
-func mustGetDocIDFromDocMap(docMap map[string]any, sd client.SchemaDescription) string {
-	doc, err := client.NewDocFromMap(docMap, sd)
+func mustGetDocIDFromDocMap(docMap map[string]any, collectionDefinition client.CollectionDefinition) string {
+	doc, err := client.NewDocFromMap(docMap, collectionDefinition)
 	if err != nil {
 		panic("can not get doc from map" + err.Error())
 	}
 	return doc.ID().String()
 }
 
-func mustAddDocIDToDoc(doc map[string]any, sd client.SchemaDescription) map[string]any {
-	doc[request.DocIDFieldName] = mustGetDocIDFromDocMap(doc, sd)
+func mustAddDocIDToDoc(doc map[string]any, collectionDefinition client.CollectionDefinition) map[string]any {
+	doc[request.DocIDFieldName] = mustGetDocIDFromDocMap(doc, collectionDefinition)
 	return doc
 }
 
-func mustAddDocIDsToDocs(docs []map[string]any, sd client.SchemaDescription) []map[string]any {
+func mustAddDocIDsToDocs(docs []map[string]any, collectionDefinition client.CollectionDefinition) []map[string]any {
 	for i := range docs {
-		mustAddDocIDToDoc(docs[i], sd)
+		mustAddDocIDToDoc(docs[i], collectionDefinition)
 	}
 	return docs
}
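Reviewer note (not part of the patch): the predefined-docs helpers now thread client.CollectionDefinition everywhere a SchemaDescription used to go, which is what lets relation fields resolve their type via Kind.Underlying(). A minimal sketch of the new calling convention, using only APIs visible in this diff (parseSDL is the package-local test helper; the SDL and values are illustrative):

	func docIDForUser(t *testing.T) string {
		colDefMap, err := parseSDL(`type User { name: String }`)
		require.NoError(t, err)

		// NewDocFromMap now takes the whole collection definition rather than
		// just its schema, so both schema and local field info are available.
		doc, err := client.NewDocFromMap(map[string]any{"name": "John"}, colDefMap["User"])
		require.NoError(t, err)

		return doc.ID().String()
	}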