diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index e48d4303f1..577198d399 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -15,4 +15,4 @@ updates:
labels:
- "dependencies"
commit-message:
- prefix: "bot"
\ No newline at end of file
+ prefix: "bot"
diff --git a/.github/workflows/build-dependencies.yml b/.github/workflows/build-dependencies.yml
index 112f847192..0bed67de09 100644
--- a/.github/workflows/build-dependencies.yml
+++ b/.github/workflows/build-dependencies.yml
@@ -37,7 +37,7 @@ jobs:
- name: Setup Go environment explicitly
uses: actions/setup-go@v3
with:
- go-version: "1.20"
+ go-version: "1.21"
check-latest: true
- name: Build all dependencies
diff --git a/.github/workflows/build-then-deploy-ami.yml b/.github/workflows/build-then-deploy-ami.yml
index ce6be1e0bd..4423d70659 100644
--- a/.github/workflows/build-then-deploy-ami.yml
+++ b/.github/workflows/build-then-deploy-ami.yml
@@ -109,7 +109,7 @@ jobs:
- name: Terraform validation
run: terraform validate -no-color
- - name: List workspaces
+ - name: List workspaces
run: ls workspaces
- name: Terraform Apply
diff --git a/.github/workflows/check-vulnerabilities.yml b/.github/workflows/check-vulnerabilities.yml
index 18e5f60de8..67d806ab6e 100644
--- a/.github/workflows/check-vulnerabilities.yml
+++ b/.github/workflows/check-vulnerabilities.yml
@@ -33,7 +33,7 @@ jobs:
- name: Run govulncheck
uses: golang/govulncheck-action@v1
with:
- go-version-input: "1.20"
+ go-version-input: "1.21"
go-package: ./...
check-latest: true
cache: true
diff --git a/.github/workflows/combine-bot-prs.yml b/.github/workflows/combine-bot-prs.yml
index 59a7910ced..abe21143d8 100644
--- a/.github/workflows/combine-bot-prs.yml
+++ b/.github/workflows/combine-bot-prs.yml
@@ -22,10 +22,10 @@ on:
default: 'dependabot'
mustBeGreen:
- description: 'Only combine PRs that are green (status is success). Set to false if repo does not run checks'
+ description: 'Only combine PRs that are green (status is success). Keep false if repo does not run checks'
type: boolean
required: true
- default: true
+ default: false
combineBranchName:
description: 'Name of the branch to combine PRs into'
@@ -35,7 +35,7 @@ on:
ignoreLabel:
description: 'Exclude PRs with this label'
required: true
- default: 'nocombine'
+ default: 'DO NOT MERGE'
jobs:
combine-bot-prs:
@@ -44,12 +44,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/github-script@v6
+ - name: Set current date as env variable
+ run: echo "CURRENT_DATE=$(date +'%d-%m-%Y')" >> ${GITHUB_ENV}
+ - name: Create combined pr
id: create-combined-pr
-
- name: Create combined pr
-
+ uses: actions/github-script@v6
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
@@ -66,7 +66,7 @@ jobs:
if (branch.startsWith('${{ github.event.inputs.branchPrefix }}')) {
console.log('Branch matched prefix: ' + branch);
let statusOK = true;
- if(${{ github.event.inputs.mustBeGreen }}) {
+ if (${{ github.event.inputs.mustBeGreen }}) {
console.log('Checking green status: ' + branch);
const stateQuery = `query($owner: String!, $repo: String!, $pull_number: Int!) {
repository(owner: $owner, name: $repo) {
@@ -92,17 +92,17 @@ jobs:
const [{ commit }] = result.repository.pullRequest.commits.nodes;
const state = commit.statusCheckRollup.state
console.log('Validating status: ' + state);
- if(state != 'SUCCESS') {
+ if (state != 'SUCCESS') {
console.log('Discarding ' + branch + ' with status ' + state);
statusOK = false;
}
}
console.log('Checking labels: ' + branch);
const labels = pull['labels'];
- for(const label of labels) {
+ for (const label of labels) {
const labelName = label['name'];
console.log('Checking label: ' + labelName);
- if(labelName == '${{ github.event.inputs.ignoreLabel }}') {
+ if (labelName == '${{ github.event.inputs.ignoreLabel }}') {
console.log('Discarding ' + branch + ' with label ' + labelName);
statusOK = false;
}
@@ -110,7 +110,10 @@ jobs:
if (statusOK) {
console.log('Adding branch to array: ' + branch);
const prString = '#' + pull['number'] + ' ' + pull['title'];
- branchesAndPRStrings.push({ branch, prString });
+ branchesAndPRStrings.push({
+ branch,
+ prString
+ });
baseBranch = pull['base']['ref'];
baseBranchSHA = pull['base']['sha'];
}
@@ -135,7 +138,7 @@ jobs:
let combinedPRs = [];
let mergeFailedPRs = [];
- for(const { branch, prString } of branchesAndPRStrings) {
+ for (const { branch, prString } of branchesAndPRStrings) {
try {
await github.rest.repos.merge({
owner: context.repo.owner,
@@ -153,15 +156,15 @@ jobs:
console.log('Creating combined PR');
const combinedPRsString = combinedPRs.join('\n');
- let body = '✅ This PR was created by the Combine PRs action by combining the following PRs:\n' + combinedPRsString;
- if(mergeFailedPRs.length > 0) {
+ let body = '✅ This PR was created by combining the following PRs:\n' + combinedPRsString;
+ if (mergeFailedPRs.length > 0) {
const mergeFailedPRsString = mergeFailedPRs.join('\n');
body += '\n\n⚠️ The following PRs were left out due to merge conflicts:\n' + mergeFailedPRsString
}
await github.rest.pulls.create({
owner: context.repo.owner,
repo: context.repo.repo,
- title: 'bot: Combined PRs',
+ title: 'bot: Update dependencies (bulk dependabot PRs) ' + process.env.CURRENT_DATE,
head: '${{ github.event.inputs.combineBranchName }}',
base: baseBranch,
body: body
diff --git a/.github/workflows/lint-then-benchmark.yml b/.github/workflows/lint-then-benchmark.yml
index 015c8725c2..c984cce3ef 100644
--- a/.github/workflows/lint-then-benchmark.yml
+++ b/.github/workflows/lint-then-benchmark.yml
@@ -57,7 +57,7 @@ jobs:
- name: Setup Go environment explicitly
uses: actions/setup-go@v3
with:
- go-version: "1.20"
+ go-version: "1.21"
check-latest: true
- name: Run the golangci-lint
@@ -270,7 +270,8 @@ jobs:
github.event_name == 'pull_request' &&
github.base_ref == 'develop'
run: >
- ${GOPATH}/bin/benchstat -html -alpha 1.1 develop.txt current.txt | sed -n "/<body>/,/<\/body>/p" > comparison.html &&
+ ${GOPATH}/bin/benchstat -html -alpha 1.1 develop.txt current.txt |
+ sed -n "/<body>/,/<\/body>/p" > comparison.html &&
./tools/scripts/pretty-benchstat-html.sh comparison.html > pretty-comparison.md
- name: Comment Benchmark Results on PR
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index a08db5565a..b482903cad 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -24,8 +24,8 @@ permissions:
contents: read
jobs:
- lint:
- name: Lint job
+ lint-go:
+ name: Lint GoLang job
runs-on: ubuntu-latest
@@ -36,12 +36,11 @@ jobs:
- name: Setup Go environment explicitly
uses: actions/setup-go@v3
with:
- go-version: "1.20"
+ go-version: "1.21"
check-latest: true
- - name: Check linting through golangci-lint
+ - name: Run golangci-lint linter
uses: golangci/golangci-lint-action@v3
-
with:
# Required: the version of golangci-lint is required.
# Note: The version should not pick the patch version as the latest patch
@@ -68,3 +67,18 @@ jobs:
# anyways so there shouldn't be any linter errors anyways. The enforces us to
# always have a clean lint state.
only-new-issues: false
+
+ lint-yaml:
+ name: Lint YAML job
+
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout code into the directory
+ uses: actions/checkout@v3
+
+ - name: Run yamllint linter
+ uses: ibiqlik/action-yamllint@v3
+ with:
+ config_file: tools/configs/yamllint.yaml
+ file_or_dir: .
diff --git a/.github/workflows/preview-ami-with-terraform-plan.yml b/.github/workflows/preview-ami-with-terraform-plan.yml
index ed2fef6f0c..25e975a247 100644
--- a/.github/workflows/preview-ami-with-terraform-plan.yml
+++ b/.github/workflows/preview-ami-with-terraform-plan.yml
@@ -131,5 +131,5 @@ jobs:
if: steps.terraform-plan.outcome == 'failure'
run: exit 1
- - name: List workspaces
+ - name: List workspaces
run: ls workspaces
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index bfcdf9666f..22161daad5 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -28,63 +28,77 @@ jobs:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
runs-on: ${{ matrix.os }}
+
steps:
- name: Checkout code into the directory
uses: actions/checkout@v3
with:
fetch-depth: 0
+ - name: Setup Go environment explicitly
+ uses: actions/setup-go@v3
+ with:
+ go-version: "1.21"
+ check-latest: true
+ cache: true
+
- name: Apply tag
run: git tag ${{ github.event.inputs.tag }}
-
+
- name: Build modules
run: make deps:modules
-
+
- name: Set up QEMU
- uses: docker/setup-qemu-action@v2
if: matrix.os == 'ubuntu-latest'
+ uses: docker/setup-qemu-action@v2
- name: Log in to Docker Hub
- uses: docker/login-action@v2
if: matrix.os == 'ubuntu-latest'
+ uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Log in to the Container registry
- uses: docker/login-action@v2
if: matrix.os == 'ubuntu-latest'
+ uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
-
- - name: Setup Go environment explicitly
- uses: actions/setup-go@v3
- with:
- go-version: "1.20"
- check-latest: true
- cache: true
- - shell: bash
- run: echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_ENV
- - uses: actions/cache@v4
+ - name: Run command to get SHA environment
+ shell: bash
+ run: echo "sha_short=$(git rev-parse --short HEAD)" >> ${GITHUB_ENV}
+
+ # Note: These saves don't actually happen right away; you will notice there is
+ # no `dist` directory when these steps are executed. The caching actually happens after
+ # the goreleaser is run, which populates the `dist` directory; it is then picked
+ # up in the job cleanup step that runs at the end of this job. That step is a post-caching
+ # cleanup step which notices the target directory is now populated and caches it.
+ - name: Save cache on Linux
if: matrix.os == 'ubuntu-latest'
+ uses: actions/cache/save@v4
with:
path: dist/linux_amd64
key: linux-${{ env.sha_short }}
- - uses: actions/cache@v4
+
+ - name: Save cache on MacOS
if: matrix.os == 'macos-latest'
+ uses: actions/cache/save@v4
with:
path: dist/darwin_amd64
key: darwin-${{ env.sha_short }}
- - uses: actions/cache@v4
+
+ - name: Save cache on Windows
if: matrix.os == 'windows-latest'
+ uses: actions/cache/save@v4
with:
path: dist/windows_amd64
key: windows-${{ env.sha_short }}
enableCrossOsArchive: true
+ # This is the step that actually `populates` the `dist` directory.
- name: Run GoReleaser
uses: goreleaser/goreleaser-action@v5
with:
@@ -95,12 +109,14 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_REPOSITORY: ${{ github.repository }}
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
-
+ # Caching actually happens about here (once the above step has run).
+
release:
runs-on: ubuntu-latest
needs: prepare
steps:
- - uses: actions/checkout@v3
+ - name: Checkout code into the directory
+ uses: actions/checkout@v3
with:
fetch-depth: 0
@@ -110,37 +126,57 @@ jobs:
- name: Setup Go environment explicitly
uses: actions/setup-go@v3
with:
- go-version: "1.20"
+ go-version: "1.21"
check-latest: true
cache: true
-
+
- name: Log in to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- # copy the cashes from prepare
- - shell: bash
- run: echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_ENV
- - uses: actions/cache@v4
+ - name: Run command to get SHA environment
+ shell: bash
+ run: echo "sha_short=$(git rev-parse --short HEAD)" >> ${GITHUB_ENV}
+
+ # Restore the caches that were prepared for all OSes
+ - name: Restore from cache on Linux
+ id: restore-linux
+ uses: actions/cache/restore@v4
with:
path: dist/linux_amd64
key: linux-${{ env.sha_short }}
- - uses: actions/cache@v4
+ fail-on-cache-miss: true
+
+ - name: Restore from cache on MacOS
+ id: restore-macos
+ uses: actions/cache/restore@v4
with:
path: dist/darwin_amd64
key: darwin-${{ env.sha_short }}
- - uses: actions/cache@v4
+ fail-on-cache-miss: true
+
+ - name: Restore from cache on Windows
+ id: restore-windows
+ uses: actions/cache/restore@v4
with:
path: dist/windows_amd64
key: windows-${{ env.sha_short }}
+ fail-on-cache-miss: true
enableCrossOsArchive: true
+ # Technically the following should never happen as we are using the `fail-on-cache-miss=true`
+ # so it would fail before reaching here, but leaving for now in case the option is removed.
+ - name: Exit if failed to restore cache for any OS
+ if: |
+ steps.restore-linux.outputs.cache-hit != 'true' ||
+ steps.restore-macos.outputs.cache-hit != 'true' ||
+ steps.restore-windows.outputs.cache-hit != 'true'
+ run: exit 1
- # release
- - uses: goreleaser/goreleaser-action@v5
- if: steps.cache.outputs.cache-hit != 'true' # do not run if cache hit
+ - name: Do the release, only if all OS caches were restored
+ uses: goreleaser/goreleaser-action@v5
with:
distribution: goreleaser-pro
version: latest
@@ -149,7 +185,7 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_REPOSITORY: ${{ github.repository }}
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
-
+
pull-docker-image:
name: Pull docker image job
runs-on: ubuntu-latest
@@ -168,9 +204,9 @@ jobs:
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
-
+
- name: Pull Docker image
run: docker pull ${{ matrix.image_tag }}
- name: Test Docker image
- run: docker run --rm ${{ matrix.image_tag }}
\ No newline at end of file
+ run: docker run --rm ${{ matrix.image_tag }}
diff --git a/.github/workflows/start-binary.yml b/.github/workflows/start-binary.yml
index 267466b8a3..35fea3c022 100644
--- a/.github/workflows/start-binary.yml
+++ b/.github/workflows/start-binary.yml
@@ -37,7 +37,7 @@ jobs:
- name: Setup Go environment explicitly
uses: actions/setup-go@v3
with:
- go-version: "1.20"
+ go-version: "1.21"
check-latest: true
- name: Build modules
diff --git a/.github/workflows/test-and-upload-coverage.yml b/.github/workflows/test-and-upload-coverage.yml
index de7c9df848..60858b1f86 100644
--- a/.github/workflows/test-and-upload-coverage.yml
+++ b/.github/workflows/test-and-upload-coverage.yml
@@ -75,7 +75,7 @@ jobs:
- name: Setup Go environment explicitly
uses: actions/setup-go@v3
with:
- go-version: "1.20"
+ go-version: "1.21"
check-latest: true
- name: Build dependencies
@@ -105,7 +105,7 @@ jobs:
needs: run-tests
- # Important to know:
+ # Important to know:
# - We didn't use `if: always()` here, so this job doesn't run if we manually canceled.
# - `if: success()` is always implied unless `always()` or `failure()` is specified.
if: success() || failure()
diff --git a/.github/workflows/validate-containerfile.yml b/.github/workflows/validate-containerfile.yml
index b3315861ad..260e0dba89 100644
--- a/.github/workflows/validate-containerfile.yml
+++ b/.github/workflows/validate-containerfile.yml
@@ -54,4 +54,3 @@ jobs:
- name: Test Docker image
run: docker run --rm ${{ env.TEST_TAG }}
-
diff --git a/.goreleaser.yaml b/.goreleaser.yaml
index 119447d180..7995056d42 100644
--- a/.goreleaser.yaml
+++ b/.goreleaser.yaml
@@ -15,7 +15,7 @@ builds:
goarch:
- amd64
- arm64
- # A build with the playground included.
+ # A build with the playground included.
- id: "defradb_playground"
main: ./cmd/defradb
flags:
@@ -27,30 +27,44 @@ builds:
goarch:
- amd64
- arm64
-
+
partial:
by: target
archives:
- id: defradb_playground
- builds:
+ builds:
- defradb_playground
format: binary
# this name template makes the OS and Arch compatible with the results of `uname`.
- name_template: '{{ .Binary }}_playground_{{ .Version }}_{{ .Os }}_{{- if eq .Arch "amd64" }}x86_64{{- else }}{{ .Arch }}{{ end }}{{- if .Arm }}v{{ .Arm }}{{ end }}'
+ name_template: >-
+ {{ .Binary }}_playground_{{ .Version }}_{{ .Os }}_
+ {{- if eq .Arch "amd64" }}x86_64
+ {{- else }}{{ .Arch }}{{ end }}
+ {{- if .Arm }}v{{ .Arm }}{{ end }}
- id: defradb
- builds:
+ builds:
- defradb
format: binary
# this name template makes the OS and Arch compatible with the results of `uname`.
- name_template: '{{ .Binary }}_{{ .Version }}_{{ .Os }}_{{- if eq .Arch "amd64" }}x86_64{{- else }}{{ .Arch }}{{ end }}{{- if .Arm }}v{{ .Arm }}{{ end }}'
+ name_template: >-
+ {{ .Binary }}_{{ .Version }}_{{ .Os }}_
+ {{- if eq .Arch "amd64" }}x86_64
+ {{- else }}{{ .Arch }}{{ end }}
+ {{- if .Arm }}v{{ .Arm }}{{ end }}
release:
target_commitish: '{{ .Commit }}'
- header: |
- DefraDB v{{ .Major }}.{{ .Minor }} is a major pre-production release. Until the stable version 1.0 is reached, the SemVer minor patch number will denote notable releases, which will give the project freedom to experiment and explore potentially breaking changes.
+ header: >
+ DefraDB v{{ .Major }}.{{ .Minor }} is a major pre-production release.
+ Until the stable version 1.0 is reached, the SemVer minor patch number will denote notable releases,
+ which will give the project freedom to experiment and explore potentially breaking changes.
+
+ To get a full outline of the changes, we invite you to review the official changelog below.
+ This release does include a Breaking Change to existing v{{ .Major }}.{{ .Minor }}.x databases.
+ If you need help migrating an existing deployment, reach out at hello@source.network or join
+ our Discord at https://discord.gg/w7jYQVJ/.
- To get a full outline of the changes, we invite you to review the official changelog below. This release does include a Breaking Change to existing v{{ .Major }}.{{ .Minor }}.x databases. If you need help migrating an existing deployment, reach out at hello@source.network or join our Discord at https://discord.gg/w7jYQVJ/.
name_template: "v{{ .Version }} Release"
changelog:
@@ -85,21 +99,20 @@ milestones:
name_template: "DefraDB v{{ .Major }}.{{ .Minor }}"
dockers:
-- ids:
- - "defradb_playground"
- image_templates:
- - "{{ .Env.GITHUB_REPOSITORY }}:latest"
- - "{{ .Env.GITHUB_REPOSITORY }}:{{ .Version }}"
- - "ghcr.io/{{ .Env.GITHUB_REPOSITORY }}:{{ .Version }}"
- use: buildx
- build_flag_templates:
- - "--pull"
- - "--label=org.opencontainers.image.description=DefraDB is a Peer-to-Peer Edge Database."
- - "--label=org.opencontainers.image.created={{ .Date }}"
- - "--label=org.opencontainers.image.name={{ .ProjectName }}"
- - "--label=org.opencontainers.image.revision={{ .FullCommit }}"
- - "--label=org.opencontainers.image.version={{ .Version }}"
- - "--label=org.opencontainers.image.source={{ .GitURL }}"
- - "--platform=linux/amd64"
- dockerfile: ./tools/goreleaser.containerfile
-
+ - ids:
+ - "defradb_playground"
+ image_templates:
+ - "{{ .Env.GITHUB_REPOSITORY }}:latest"
+ - "{{ .Env.GITHUB_REPOSITORY }}:{{ .Version }}"
+ - "ghcr.io/{{ .Env.GITHUB_REPOSITORY }}:{{ .Version }}"
+ use: buildx
+ build_flag_templates:
+ - "--pull"
+ - "--label=org.opencontainers.image.description=DefraDB is a Peer-to-Peer Edge Database."
+ - "--label=org.opencontainers.image.created={{ .Date }}"
+ - "--label=org.opencontainers.image.name={{ .ProjectName }}"
+ - "--label=org.opencontainers.image.revision={{ .FullCommit }}"
+ - "--label=org.opencontainers.image.version={{ .Version }}"
+ - "--label=org.opencontainers.image.source={{ .GitURL }}"
+ - "--platform=linux/amd64"
+ dockerfile: ./tools/goreleaser.containerfile
diff --git a/Makefile b/Makefile
index 0ddde9790f..56ff8cbcbe 100644
--- a/Makefile
+++ b/Makefile
@@ -5,6 +5,33 @@ ifndef VERBOSE
MAKEFLAGS+=--no-print-directory
endif
+# Detect OS (`Linux`, `Darwin`, `Windows`)
+# Note: can use `lsb_release --id --short` for more specific Linux distro information.
+OS_GENERAL := Unknown
+ifeq ($(OS),Windows_NT)
+ OS_GENERAL := Windows
+else
+ OS_GENERAL := $(shell sh -c 'uname 2>/dev/null || echo Unknown')
+endif
+
+# Detect OS-specific package manager if possible (`apt`, `yum`, `pacman`, `dnf`, `brew`, `choco`)
+OS_PACKAGE_MANAGER := Unknown
+ifeq ($(OS_GENERAL),Linux)
+ ifneq ($(shell which apt 2>/dev/null),)
+ OS_PACKAGE_MANAGER := apt
+ else ifneq ($(shell which yum 2>/dev/null),)
+ OS_PACKAGE_MANAGER := yum
+ else ifneq ($(shell which pacman 2>/dev/null),)
+ OS_PACKAGE_MANAGER := pacman
+ else ifneq ($(shell which dnf 2>/dev/null),)
+ OS_PACKAGE_MANAGER := dnf
+ endif
+else ifeq ($(OS_GENERAL),Darwin)
+ OS_PACKAGE_MANAGER := brew
+else ifeq ($(OS_GENERAL),Windows)
+ OS_PACKAGE_MANAGER := choco
+endif
+
# Provide info from git to the version package using linker flags.
ifeq (, $(shell which git))
$(error "No git in $(PATH), version information won't be included")
@@ -18,6 +45,15 @@ else
VERSION_GITRELEASE=$(shell git describe --tags)
endif
+$(info ----------------------------------------);
+$(info OS = $(OS_GENERAL));
+$(info PACKAGE_MANAGER = $(OS_PACKAGE_MANAGER));
+$(info GOINFO = $(VERSION_GOINFO));
+$(info GITCOMMIT = $(VERSION_GITCOMMIT));
+$(info GITCOMMITDATE = $(VERSION_GITCOMMITDATE));
+$(info GITRELEASE = $(VERSION_GITRELEASE));
+$(info ----------------------------------------);
+
BUILD_FLAGS=-trimpath -ldflags "\
-X 'github.com/sourcenetwork/defradb/version.GoInfo=$(VERSION_GOINFO)'\
-X 'github.com/sourcenetwork/defradb/version.GitRelease=$(VERSION_GITRELEASE)'\
@@ -47,6 +83,15 @@ default:
install:
@go install $(BUILD_FLAGS) ./cmd/defradb
+.PHONY: install\:manpages
+install\:manpages:
+ifeq ($(OS_GENERAL),Linux)
+ cp build/man/* /usr/share/man/man1/
+endif
+ifneq ($(OS_GENERAL),Linux)
+ @echo "Direct installation of Defradb's man pages is not supported on your system."
+endif
+
# Usage:
# - make build
# - make build path="path/to/defradb-binary"
@@ -78,9 +123,23 @@ client\:dump:
client\:add-schema:
./build/defradb client schema add -f examples/schema/bookauthpub.graphql
+.PHONY: deps\:lint-go
+deps\:lint-go:
+ go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.54
+
+.PHONY: deps\:lint-yaml
+deps\:lint-yaml:
+ifeq (, $(shell which yamllint))
+ $(info YAML linter 'yamllint' not found on the system, please install it.)
+ $(info Can try using your local package manager: $(OS_PACKAGE_MANAGER))
+else
+ $(info YAML linter 'yamllint' already installed.)
+endif
+
.PHONY: deps\:lint
deps\:lint:
- go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.54
+ @$(MAKE) deps:lint-go && \
+ $(MAKE) deps:lint-yaml
.PHONY: deps\:test
deps\:test:
@@ -147,7 +206,7 @@ verify:
.PHONY: tidy
tidy:
- go mod tidy -go=1.20
+ go mod tidy -go=1.21
.PHONY: clean
clean:
@@ -300,6 +359,7 @@ validate\:circleci:
.PHONY: lint
lint:
golangci-lint run --config tools/configs/golangci.yaml
+ yamllint -c tools/configs/yamllint.yaml .
.PHONY: lint\:fix
lint\:fix:
@@ -334,13 +394,3 @@ docs\:manpages:
docs\:godoc:
godoc -http=:6060
# open http://localhost:6060/pkg/github.com/sourcenetwork/defradb/
-
-detectedOS := $(shell uname)
-.PHONY: install\:manpages
-install\:manpages:
-ifeq ($(detectedOS),Linux)
- cp build/man/* /usr/share/man/man1/
-endif
-ifneq ($(detectedOS),Linux)
- @echo "Direct installation of Defradb's man pages is not supported on your system."
-endif
diff --git a/cli/collection.go b/cli/collection.go
index 8af1839b47..996af66f9a 100644
--- a/cli/collection.go
+++ b/cli/collection.go
@@ -81,7 +81,7 @@ func MakeCollectionCommand(cfg *config.Config) *cobra.Command {
fetchedCols := cols
cols = nil
for _, c := range fetchedCols {
- if c.Name() == name {
+ if c.Name().Value() == name {
cols = append(cols, c)
break
}
diff --git a/client/collection.go b/client/collection.go
index 3a42871c62..9ce1e135d6 100644
--- a/client/collection.go
+++ b/client/collection.go
@@ -13,6 +13,8 @@ package client
import (
"context"
+ "github.com/sourcenetwork/immutable"
+
"github.com/sourcenetwork/defradb/datastore"
)
@@ -32,7 +34,7 @@ type CollectionDefinition struct {
// Many functions on this object will interact with the underlying datastores.
type Collection interface {
// Name returns the name of this collection.
- Name() string
+ Name() immutable.Option[string]
// ID returns the ID of this Collection.
ID() uint32
diff --git a/client/descriptions.go b/client/descriptions.go
index 7a4ec0ba7e..ca88f9362e 100644
--- a/client/descriptions.go
+++ b/client/descriptions.go
@@ -11,8 +11,11 @@
package client
import (
+ "encoding/json"
"fmt"
+ "github.com/sourcenetwork/immutable"
+
"github.com/sourcenetwork/defradb/client/request"
)
@@ -22,7 +25,7 @@ type CollectionDescription struct {
//
// It is conceptually local to the node hosting the DefraDB instance, but currently there
// is no means to update the local value so that it differs from the (global) schema name.
- Name string
+ Name immutable.Option[string]
// ID is the local identifier of this collection.
//
@@ -32,12 +35,11 @@ type CollectionDescription struct {
// The ID of the schema version that this collection is at.
SchemaVersionID string
- // BaseQuery contains the base query of this view, if this collection is a view.
+ // Sources is the set of sources from which this collection draws data.
//
- // The query will be saved, and then may be accessed by other actors on demand. Actor defined
- // aggregates, filters and other logic (such as LensVM transforms) will execute on top of this
- // base query before the result is returned to the actor.
- BaseQuery *request.Select
+ // Currently supported source types are:
+ // - [QuerySource]
+ Sources []any
// Indexes contains the secondary indexes that this Collection has.
Indexes []IndexDescription
@@ -78,13 +80,37 @@ func (col CollectionDescription) GetFieldByRelation(
schema *SchemaDescription,
) (FieldDescription, bool) {
for _, field := range schema.Fields {
- if field.RelationName == relationName && !(col.Name == otherCollectionName && otherFieldName == field.Name) {
+ if field.RelationName == relationName && !(col.Name.Value() == otherCollectionName && otherFieldName == field.Name) {
return field, true
}
}
return FieldDescription{}, false
}
+// QuerySources returns all the Sources of type [QuerySource]
+func (col CollectionDescription) QuerySources() []*QuerySource {
+ return sourcesOfType[*QuerySource](col)
+}
+
+func sourcesOfType[ResultType any](col CollectionDescription) []ResultType {
+ result := []ResultType{}
+ for _, source := range col.Sources {
+ if typedSource, isOfType := source.(ResultType); isOfType {
+ result = append(result, typedSource)
+ }
+ }
+ return result
+}
+
+// QuerySource represents a collection data source from a query.
+//
+// The query will be executed when data from this source is requested, and the query results
+// yielded to the consumer.
+type QuerySource struct {
+ // Query contains the base query of this data source.
+ Query request.Select
+}
+
// SchemaDescription describes a Schema and its associated metadata.
type SchemaDescription struct {
// Root is the version agnostic identifier for this schema.
@@ -157,6 +183,8 @@ func (f FieldKind) String() string {
return "[String!]"
case FieldKind_BLOB:
return "Blob"
+ case FieldKind_JSON:
+ return "JSON"
default:
return fmt.Sprint(uint8(f))
}
@@ -178,7 +206,7 @@ const (
FieldKind_STRING FieldKind = 11
FieldKind_STRING_ARRAY FieldKind = 12
FieldKind_BLOB FieldKind = 13
- _ FieldKind = 14 // safe to repurpose (was never used)
+ FieldKind_JSON FieldKind = 14
_ FieldKind = 15 // safe to repurpose (was never used)
// Embedded object, but accessed via foreign keys
@@ -216,6 +244,7 @@ var FieldKindStringToEnumMapping = map[string]FieldKind{
"[String]": FieldKind_NILLABLE_STRING_ARRAY,
"[String!]": FieldKind_STRING_ARRAY,
"Blob": FieldKind_BLOB,
+ "JSON": FieldKind_JSON,
}
// RelationType describes the type of relation between two types.
@@ -322,3 +351,59 @@ func (f FieldDescription) IsArray() bool {
func (m RelationType) IsSet(target RelationType) bool {
return m&target > 0
}
+
+// collectionDescription is a private type used to facilitate the unmarshalling
+// of json to a [CollectionDescription].
+type collectionDescription struct {
+ // These properties are unmarshalled using the default json unmarshaller
+ Name immutable.Option[string]
+ ID uint32
+ SchemaVersionID string
+ Indexes []IndexDescription
+
+ // Properties below this line are unmarshalled using custom logic in [UnmarshalJSON]
+ Sources []map[string]json.RawMessage
+}
+
+func (c *CollectionDescription) UnmarshalJSON(bytes []byte) error {
+ var descMap collectionDescription
+ err := json.Unmarshal(bytes, &descMap)
+ if err != nil {
+ return err
+ }
+
+ c.Name = descMap.Name
+ c.ID = descMap.ID
+ c.SchemaVersionID = descMap.SchemaVersionID
+ c.Indexes = descMap.Indexes
+ c.Sources = make([]any, len(descMap.Sources))
+
+ for i, source := range descMap.Sources {
+ sourceJson, err := json.Marshal(source)
+ if err != nil {
+ return err
+ }
+
+ var sourceValue any
+ // We detect which concrete type each `Source` object is by detecting
+ // non-nillable fields, if the key is present it must be of that type.
+ // They must be non-nillable as nil values may have their keys omitted from
+ // the json. This also relies on the fields being unique. We may wish to change
+ // this later to custom-serialize with a `_type` property.
+ if _, ok := source["Query"]; ok {
+ // This must be a QuerySource, as only the `QuerySource` type has a `Query` field
+ var querySource QuerySource
+ err := json.Unmarshal(sourceJson, &querySource)
+ if err != nil {
+ return err
+ }
+ sourceValue = &querySource
+ } else {
+ return ErrFailedToUnmarshalCollection
+ }
+
+ c.Sources[i] = sourceValue
+ }
+
+ return nil
+}
diff --git a/client/document.go b/client/document.go
index 93e06df27e..7345f10d09 100644
--- a/client/document.go
+++ b/client/document.go
@@ -177,7 +177,7 @@ func NewDocsFromJSON(obj []byte, sd SchemaDescription) ([]*Document, error) {
// the typed value again as an interface.
func validateFieldSchema(val any, field FieldDescription) (any, error) {
switch field.Kind {
- case FieldKind_DocID, FieldKind_STRING, FieldKind_BLOB:
+ case FieldKind_DocID, FieldKind_STRING, FieldKind_BLOB, FieldKind_JSON:
return getString(val)
case FieldKind_STRING_ARRAY:
diff --git a/client/errors.go b/client/errors.go
index 78daf3531b..60ccac9669 100644
--- a/client/errors.go
+++ b/client/errors.go
@@ -17,17 +17,19 @@ import (
)
const (
- errFieldNotExist string = "The given field does not exist"
- errUnexpectedType string = "unexpected type"
- errParsingFailed string = "failed to parse argument"
- errUninitializeProperty string = "invalid state, required property is uninitialized"
- errMaxTxnRetries string = "reached maximum transaction reties"
- errRelationOneSided string = "relation must be defined on both schemas"
- errCollectionNotFound string = "collection not found"
- errFieldOrAliasToFieldNotExist string = "The given field or alias to field does not exist"
- errUnknownCRDT string = "unknown crdt"
- errCRDTKindMismatch string = "CRDT type %s can't be assigned to field kind %s"
- errInvalidCRDTType string = "CRDT type not supported"
+ errFieldNotExist string = "The given field does not exist"
+ errUnexpectedType string = "unexpected type"
+ errParsingFailed string = "failed to parse argument"
+ errUninitializeProperty string = "invalid state, required property is uninitialized"
+ errMaxTxnRetries string = "reached maximum transaction reties"
+ errRelationOneSided string = "relation must be defined on both schemas"
+ errCollectionNotFound string = "collection not found"
+ errFieldOrAliasToFieldNotExist string = "The given field or alias to field does not exist"
+ errUnknownCRDT string = "unknown crdt"
+ errCRDTKindMismatch string = "CRDT type %s can't be assigned to field kind %s"
+ errInvalidCRDTType string = "CRDT type not supported"
+ errFailedToUnmarshalCollection string = "failed to unmarshal collection json"
+ errOperationNotPermittedOnNamelessCols string = "operation not permitted on nameless collection"
)
// Errors returnable from this package.
@@ -35,16 +37,18 @@ const (
// This list is incomplete and undefined errors may also be returned.
// Errors returned from this package may be tested against these errors with errors.Is.
var (
- ErrFieldNotExist = errors.New(errFieldNotExist)
- ErrUnexpectedType = errors.New(errUnexpectedType)
- ErrFieldNotObject = errors.New("trying to access field on a non object type")
- ErrValueTypeMismatch = errors.New("value does not match indicated type")
- ErrDocumentNotFound = errors.New("no document for the given ID exists")
- ErrInvalidUpdateTarget = errors.New("the target document to update is of invalid type")
- ErrInvalidUpdater = errors.New("the updater of a document is of invalid type")
- ErrInvalidDeleteTarget = errors.New("the target document to delete is of invalid type")
- ErrMalformedDocID = errors.New("malformed document ID, missing either version or cid")
- ErrInvalidDocIDVersion = errors.New("invalid document ID version")
+ ErrFieldNotExist = errors.New(errFieldNotExist)
+ ErrUnexpectedType = errors.New(errUnexpectedType)
+ ErrFailedToUnmarshalCollection = errors.New(errFailedToUnmarshalCollection)
+ ErrOperationNotPermittedOnNamelessCols = errors.New(errOperationNotPermittedOnNamelessCols)
+ ErrFieldNotObject = errors.New("trying to access field on a non object type")
+ ErrValueTypeMismatch = errors.New("value does not match indicated type")
+ ErrDocumentNotFound = errors.New("no document for the given ID exists")
+ ErrInvalidUpdateTarget = errors.New("the target document to update is of invalid type")
+ ErrInvalidUpdater = errors.New("the updater of a document is of invalid type")
+ ErrInvalidDeleteTarget = errors.New("the target document to delete is of invalid type")
+ ErrMalformedDocID = errors.New("malformed document ID, missing either version or cid")
+ ErrInvalidDocIDVersion = errors.New("invalid document ID version")
)
// NewErrFieldNotExist returns an error indicating that the given field does not exist.
diff --git a/client/index.go b/client/index.go
index 5e2d397394..d0726b1625 100644
--- a/client/index.go
+++ b/client/index.go
@@ -58,3 +58,15 @@ func (d CollectionDescription) CollectIndexedFields(schema *SchemaDescription) [
}
return fields
}
+
+// GetIndexesOnField returns all indexes that are indexing the given field.
+// If the field is not the first field of a composite index, the index is not returned.
+func (d CollectionDescription) GetIndexesOnField(fieldName string) []IndexDescription {
+ result := []IndexDescription{}
+ for _, index := range d.Indexes {
+ if index.Fields[0].Name == fieldName {
+ result = append(result, index)
+ }
+ }
+ return result
+}
diff --git a/client/index_test.go b/client/index_test.go
new file mode 100644
index 0000000000..feb8ccdd69
--- /dev/null
+++ b/client/index_test.go
@@ -0,0 +1,129 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package client
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestCollectIndexesOnField(t *testing.T) {
+ tests := []struct {
+ name string
+ desc CollectionDescription
+ field string
+ expected []IndexDescription
+ }{
+ {
+ name: "no indexes",
+ desc: CollectionDescription{
+ Indexes: []IndexDescription{},
+ },
+ field: "test",
+ expected: []IndexDescription{},
+ },
+ {
+ name: "single index on field",
+ desc: CollectionDescription{
+ Indexes: []IndexDescription{
+ {
+ Name: "index1",
+ Fields: []IndexedFieldDescription{
+ {Name: "test", Direction: Ascending},
+ },
+ },
+ },
+ },
+ field: "test",
+ expected: []IndexDescription{
+ {
+ Name: "index1",
+ Fields: []IndexedFieldDescription{
+ {Name: "test", Direction: Ascending},
+ },
+ },
+ },
+ },
+ {
+ name: "multiple indexes on field",
+ desc: CollectionDescription{
+ Indexes: []IndexDescription{
+ {
+ Name: "index1",
+ Fields: []IndexedFieldDescription{
+ {Name: "test", Direction: Ascending},
+ },
+ },
+ {
+ Name: "index2",
+ Fields: []IndexedFieldDescription{
+ {Name: "test", Direction: Descending},
+ },
+ },
+ },
+ },
+ field: "test",
+ expected: []IndexDescription{
+ {
+ Name: "index1",
+ Fields: []IndexedFieldDescription{
+ {Name: "test", Direction: Ascending},
+ },
+ },
+ {
+ Name: "index2",
+ Fields: []IndexedFieldDescription{
+ {Name: "test", Direction: Descending},
+ },
+ },
+ },
+ },
+ {
+ name: "no indexes on field",
+ desc: CollectionDescription{
+ Indexes: []IndexDescription{
+ {
+ Name: "index1",
+ Fields: []IndexedFieldDescription{
+ {Name: "other", Direction: Ascending},
+ },
+ },
+ },
+ },
+ field: "test",
+ expected: []IndexDescription{},
+ },
+ {
+ name: "second field in composite index",
+ desc: CollectionDescription{
+ Indexes: []IndexDescription{
+ {
+ Name: "index1",
+ Fields: []IndexedFieldDescription{
+ {Name: "other", Direction: Ascending},
+ {Name: "test", Direction: Ascending},
+ },
+ },
+ },
+ },
+ field: "test",
+ expected: []IndexDescription{},
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ actual := tt.desc.GetIndexesOnField(tt.field)
+ assert.Equal(t, tt.expected, actual)
+ })
+ }
+}
diff --git a/cmd/genopenapi/main.go b/cmd/genopenapi/main.go
new file mode 100644
index 0000000000..ed655eb932
--- /dev/null
+++ b/cmd/genopenapi/main.go
@@ -0,0 +1,33 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+/*
+genopenapi is a tool to generate and print an OpenAPI specification.
+*/
+package main
+
+import (
+ "fmt"
+ "os"
+
+ "github.com/sourcenetwork/defradb/http"
+)
+
+func main() {
+ router, err := http.NewApiRouter()
+ if err != nil {
+ panic(err)
+ }
+ json, err := router.OpenAPI().MarshalJSON()
+ if err != nil {
+ panic(err)
+ }
+ fmt.Fprint(os.Stdout, string(json))
+}
diff --git a/core/key.go b/core/key.go
index 0c038b11dd..cb67cc45d6 100644
--- a/core/key.go
+++ b/core/key.go
@@ -17,6 +17,7 @@ import (
"github.com/ipfs/go-cid"
ds "github.com/ipfs/go-datastore"
+ "github.com/sourcenetwork/immutable"
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/errors"
@@ -132,8 +133,8 @@ var _ Key = (*CollectionSchemaVersionKey)(nil)
// CollectionIndexKey to a stored description of an index
type CollectionIndexKey struct {
- // CollectionName is the name of the collection that the index is on
- CollectionName string
+ // CollectionID is the id of the collection that the index is on
+ CollectionID immutable.Option[uint32]
// IndexName is the name of the index
IndexName string
}
@@ -291,14 +292,14 @@ func NewCollectionSchemaVersionKeyFromString(key string) (CollectionSchemaVersio
}
// NewCollectionIndexKey creates a new CollectionIndexKey from a collection name and index name.
-func NewCollectionIndexKey(colID, indexName string) CollectionIndexKey {
- return CollectionIndexKey{CollectionName: colID, IndexName: indexName}
+func NewCollectionIndexKey(colID immutable.Option[uint32], indexName string) CollectionIndexKey {
+ return CollectionIndexKey{CollectionID: colID, IndexName: indexName}
}
// NewCollectionIndexKeyFromString creates a new CollectionIndexKey from a string.
// It expects the input string is in the following format:
//
-// /collection/index/[CollectionName]/[IndexName]
+// /collection/index/[CollectionID]/[IndexName]
//
// Where [IndexName] might be omitted. Anything else will return an error.
func NewCollectionIndexKeyFromString(key string) (CollectionIndexKey, error) {
@@ -306,7 +307,13 @@ func NewCollectionIndexKeyFromString(key string) (CollectionIndexKey, error) {
if len(keyArr) < 4 || len(keyArr) > 5 || keyArr[1] != "collection" || keyArr[2] != "index" {
return CollectionIndexKey{}, ErrInvalidKey
}
- result := CollectionIndexKey{CollectionName: keyArr[3]}
+
+ colID, err := strconv.Atoi(keyArr[3])
+ if err != nil {
+ return CollectionIndexKey{}, err
+ }
+
+ result := CollectionIndexKey{CollectionID: immutable.Some(uint32(colID))}
if len(keyArr) == 5 {
result.IndexName = keyArr[4]
}
@@ -315,13 +322,13 @@ func NewCollectionIndexKeyFromString(key string) (CollectionIndexKey, error) {
// ToString returns the string representation of the key
// It is in the following format:
-// /collection/index/[CollectionName]/[IndexName]
-// if [CollectionName] is empty, the rest is ignored
+// /collection/index/[CollectionID]/[IndexName]
+// if [CollectionID] is empty, the rest is ignored
func (k CollectionIndexKey) ToString() string {
result := COLLECTION_INDEX
- if k.CollectionName != "" {
- result = result + "/" + k.CollectionName
+ if k.CollectionID.HasValue() {
+ result = result + "/" + fmt.Sprint(k.CollectionID.Value())
if k.IndexName != "" {
result = result + "/" + k.IndexName
}
diff --git a/core/key_test.go b/core/key_test.go
index 4984c5b14f..52a22a5856 100644
--- a/core/key_test.go
+++ b/core/key_test.go
@@ -14,6 +14,7 @@ import (
"testing"
ds "github.com/ipfs/go-datastore"
+ "github.com/sourcenetwork/immutable"
"github.com/stretchr/testify/assert"
)
@@ -110,23 +111,23 @@ func TestNewDataStoreKey_GivenAStringWithExtraSuffix(t *testing.T) {
}
func TestNewIndexKey_IfEmptyParam_ReturnPrefix(t *testing.T) {
- key := NewCollectionIndexKey("", "")
+ key := NewCollectionIndexKey(immutable.None[uint32](), "")
assert.Equal(t, "/collection/index", key.ToString())
}
func TestNewIndexKey_IfParamsAreGiven_ReturnFullKey(t *testing.T) {
- key := NewCollectionIndexKey("col", "idx")
- assert.Equal(t, "/collection/index/col/idx", key.ToString())
+ key := NewCollectionIndexKey(immutable.Some[uint32](1), "idx")
+ assert.Equal(t, "/collection/index/1/idx", key.ToString())
}
func TestNewIndexKey_InNoCollectionName_ReturnJustPrefix(t *testing.T) {
- key := NewCollectionIndexKey("", "idx")
+ key := NewCollectionIndexKey(immutable.None[uint32](), "idx")
assert.Equal(t, "/collection/index", key.ToString())
}
func TestNewIndexKey_InNoIndexName_ReturnWithoutIndexName(t *testing.T) {
- key := NewCollectionIndexKey("col", "")
- assert.Equal(t, "/collection/index/col", key.ToString())
+ key := NewCollectionIndexKey(immutable.Some[uint32](1), "")
+ assert.Equal(t, "/collection/index/1", key.ToString())
}
func TestNewIndexKeyFromString_IfInvalidString_ReturnError(t *testing.T) {
@@ -144,17 +145,17 @@ func TestNewIndexKeyFromString_IfInvalidString_ReturnError(t *testing.T) {
}
func TestNewIndexKeyFromString_IfOnlyCollectionName_ReturnKey(t *testing.T) {
- key, err := NewCollectionIndexKeyFromString("/collection/index/col")
+ key, err := NewCollectionIndexKeyFromString("/collection/index/1")
assert.NoError(t, err)
- assert.Equal(t, key.CollectionName, "col")
- assert.Equal(t, key.IndexName, "")
+ assert.Equal(t, immutable.Some[uint32](1), key.CollectionID)
+ assert.Equal(t, "", key.IndexName)
}
func TestNewIndexKeyFromString_IfFullKeyString_ReturnKey(t *testing.T) {
- key, err := NewCollectionIndexKeyFromString("/collection/index/col/idx")
+ key, err := NewCollectionIndexKeyFromString("/collection/index/1/idx")
assert.NoError(t, err)
- assert.Equal(t, key.CollectionName, "col")
- assert.Equal(t, key.IndexName, "idx")
+ assert.Equal(t, immutable.Some[uint32](1), key.CollectionID)
+ assert.Equal(t, "idx", key.IndexName)
}
func toFieldValues(values ...string) [][]byte {
@@ -312,10 +313,10 @@ func TestIndexDatastoreKey_EqualTrue(t *testing.T) {
func TestCollectionIndexKey_Bytes(t *testing.T) {
key := CollectionIndexKey{
- CollectionName: "col",
- IndexName: "idx",
+ CollectionID: immutable.Some[uint32](1),
+ IndexName: "idx",
}
- assert.Equal(t, []byte(COLLECTION_INDEX+"/col/idx"), key.Bytes())
+ assert.Equal(t, []byte(COLLECTION_INDEX+"/1/idx"), key.Bytes())
}
func TestIndexDatastoreKey_EqualFalse(t *testing.T) {
diff --git a/db/backup.go b/db/backup.go
index d3a1138686..5573d77894 100644
--- a/db/backup.go
+++ b/db/backup.go
@@ -137,7 +137,7 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client
}
colNameCache := map[string]struct{}{}
for _, col := range cols {
- colNameCache[col.Name()] = struct{}{}
+ colNameCache[col.Name().Value()] = struct{}{}
}
tempFile := config.Filepath + ".temp"
@@ -181,8 +181,8 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client
// set collection
err = writeString(
f,
- fmt.Sprintf("\"%s\":[", col.Name()),
- fmt.Sprintf(" \"%s\": [\n", col.Name()),
+ fmt.Sprintf("\"%s\":[", col.Name().Value()),
+ fmt.Sprintf(" \"%s\": [\n", col.Name().Value()),
config.Pretty,
)
if err != nil {
diff --git a/db/collection.go b/db/collection.go
index f066c1d9fe..50a8acb4f6 100644
--- a/db/collection.go
+++ b/db/collection.go
@@ -96,12 +96,14 @@ func (db *db) createCollection(
schema := def.Schema
desc := def.Description
- exists, err := description.HasCollectionByName(ctx, txn, desc.Name)
- if err != nil {
- return nil, err
- }
- if exists {
- return nil, ErrCollectionAlreadyExists
+ if desc.Name.HasValue() {
+ exists, err := description.HasCollectionByName(ctx, txn, desc.Name.Value())
+ if err != nil {
+ return nil, err
+ }
+ if exists {
+ return nil, ErrCollectionAlreadyExists
+ }
}
colSeq, err := db.getSequence(ctx, txn, core.COLLECTION)
@@ -132,7 +134,7 @@ func (db *db) createCollection(
}
}
- return db.getCollectionByName(ctx, txn, desc.Name)
+ return db.getCollectionByID(ctx, txn, desc.ID)
}
// updateSchema updates the persisted schema description matching the name of the given
@@ -201,6 +203,13 @@ func (db *db) updateSchema(
}
for _, col := range cols {
+ if !col.Name.HasValue() {
+ // Nameless collections cannot be made default as they cannot be queried without a name.
+ // Note: The `setAsDefaultVersion` block will need a re-write when collections become immutable
+ // and the schema version stuff gets tracked by [CollectionDescription.Sources] instead.
+ continue
+ }
+
col.SchemaVersionID = schema.VersionID
col, err = description.SaveCollection(ctx, txn, col)
@@ -208,7 +217,7 @@ func (db *db) updateSchema(
return err
}
- err = db.setDefaultSchemaVersionExplicit(ctx, txn, col.Name, schema.VersionID)
+ err = db.setDefaultSchemaVersionExplicit(ctx, txn, col.Name.Value(), schema.VersionID)
if err != nil {
return err
}
@@ -538,6 +547,26 @@ func (db *db) getCollectionsByVersionID(
return collections, nil
}
+func (db *db) getCollectionByID(ctx context.Context, txn datastore.Txn, id uint32) (client.Collection, error) {
+ col, err := description.GetCollectionByID(ctx, txn, id)
+ if err != nil {
+ return nil, err
+ }
+
+ schema, err := description.GetSchemaVersion(ctx, txn, col.SchemaVersionID)
+ if err != nil {
+ return nil, err
+ }
+
+ collection := db.newCollection(col, schema)
+ err = collection.loadIndexes(ctx, txn)
+ if err != nil {
+ return nil, err
+ }
+
+ return collection, nil
+}
+
// getCollectionByName returns an existing collection within the database.
func (db *db) getCollectionByName(ctx context.Context, txn datastore.Txn, name string) (client.Collection, error) {
if name == "" {
@@ -740,7 +769,7 @@ func (c *collection) Description() client.CollectionDescription {
}
// Name returns the collection name.
-func (c *collection) Name() string {
+func (c *collection) Name() immutable.Option[string] {
return c.Description().Name
}
@@ -989,7 +1018,7 @@ func (c *collection) save(
if isSecondaryRelationID {
primaryId := val.Value().(string)
- err = c.patchPrimaryDoc(ctx, txn, c.Name(), relationFieldDescription, primaryKey.DocID, primaryId)
+ err = c.patchPrimaryDoc(ctx, txn, c.Name().Value(), relationFieldDescription, primaryKey.DocID, primaryId)
if err != nil {
return cid.Undef, err
}
diff --git a/db/collection_index.go b/db/collection_index.go
index 4367d8ebdf..0557c00609 100644
--- a/db/collection_index.go
+++ b/db/collection_index.go
@@ -18,10 +18,13 @@ import (
"strconv"
"strings"
+ "github.com/sourcenetwork/immutable"
+
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/core"
"github.com/sourcenetwork/defradb/datastore"
"github.com/sourcenetwork/defradb/db/base"
+ "github.com/sourcenetwork/defradb/db/description"
"github.com/sourcenetwork/defradb/db/fetcher"
"github.com/sourcenetwork/defradb/request/graphql/schema"
)
@@ -59,7 +62,7 @@ func (db *db) getAllIndexes(
ctx context.Context,
txn datastore.Txn,
) (map[client.CollectionName][]client.IndexDescription, error) {
- prefix := core.NewCollectionIndexKey("", "")
+ prefix := core.NewCollectionIndexKey(immutable.None[uint32](), "")
keys, indexDescriptions, err := datastore.DeserializePrefix[client.IndexDescription](ctx,
prefix.ToString(), txn.Systemstore())
@@ -75,8 +78,14 @@ func (db *db) getAllIndexes(
if err != nil {
return nil, NewErrInvalidStoredIndexKey(indexKey.ToString())
}
- indexes[indexKey.CollectionName] = append(
- indexes[indexKey.CollectionName],
+
+ col, err := description.GetCollectionByID(ctx, txn, indexKey.CollectionID.Value())
+ if err != nil {
+ return nil, err
+ }
+
+ indexes[col.Name.Value()] = append(
+ indexes[col.Name.Value()],
indexDescriptions[i],
)
}
@@ -87,9 +96,9 @@ func (db *db) getAllIndexes(
func (db *db) fetchCollectionIndexDescriptions(
ctx context.Context,
txn datastore.Txn,
- colName string,
+ colID uint32,
) ([]client.IndexDescription, error) {
- prefix := core.NewCollectionIndexKey(colName, "")
+ prefix := core.NewCollectionIndexKey(immutable.Some(colID), "")
_, indexDescriptions, err := datastore.DeserializePrefix[client.IndexDescription](ctx,
prefix.ToString(), txn.Systemstore())
if err != nil {
@@ -223,7 +232,8 @@ func (c *collection) createIndex(
c.indexes = append(c.indexes, colIndex)
err = c.indexExistingDocs(ctx, txn, colIndex)
if err != nil {
- return nil, err
+ removeErr := colIndex.RemoveAll(ctx, txn)
+ return nil, errors.Join(err, removeErr)
}
return colIndex, nil
}
@@ -338,7 +348,7 @@ func (c *collection) dropIndex(ctx context.Context, txn datastore.Txn, indexName
break
}
}
- key := core.NewCollectionIndexKey(c.Name(), indexName)
+ key := core.NewCollectionIndexKey(immutable.Some(c.ID()), indexName)
err = txn.Systemstore().Delete(ctx, key.ToDS())
if err != nil {
return err
@@ -348,7 +358,7 @@ func (c *collection) dropIndex(ctx context.Context, txn datastore.Txn, indexName
}
func (c *collection) dropAllIndexes(ctx context.Context, txn datastore.Txn) error {
- prefix := core.NewCollectionIndexKey(c.Name(), "")
+ prefix := core.NewCollectionIndexKey(immutable.Some(c.ID()), "")
keys, err := datastore.FetchKeysForPrefix(ctx, prefix.ToString(), txn.Systemstore())
if err != nil {
@@ -366,7 +376,7 @@ func (c *collection) dropAllIndexes(ctx context.Context, txn datastore.Txn) erro
}
func (c *collection) loadIndexes(ctx context.Context, txn datastore.Txn) error {
- indexDescriptions, err := c.db.fetchCollectionIndexDescriptions(ctx, txn, c.Name())
+ indexDescriptions, err := c.db.fetchCollectionIndexDescriptions(ctx, txn, c.ID())
if err != nil {
return err
}
@@ -428,7 +438,7 @@ func (c *collection) generateIndexNameIfNeededAndCreateKey(
nameIncrement := 1
for {
desc.Name = generateIndexName(c, desc.Fields, nameIncrement)
- indexKey = core.NewCollectionIndexKey(c.Name(), desc.Name)
+ indexKey = core.NewCollectionIndexKey(immutable.Some(c.ID()), desc.Name)
exists, err := txn.Systemstore().Has(ctx, indexKey.ToDS())
if err != nil {
return core.CollectionIndexKey{}, err
@@ -439,7 +449,7 @@ func (c *collection) generateIndexNameIfNeededAndCreateKey(
nameIncrement++
}
} else {
- indexKey = core.NewCollectionIndexKey(c.Name(), desc.Name)
+ indexKey = core.NewCollectionIndexKey(immutable.Some(c.ID()), desc.Name)
exists, err := txn.Systemstore().Has(ctx, indexKey.ToDS())
if err != nil {
return core.CollectionIndexKey{}, err
@@ -477,7 +487,11 @@ func generateIndexName(col client.Collection, fields []client.IndexedFieldDescri
// at the moment we support only single field indexes that can be stored only in
// ascending order. This will change once we introduce composite indexes.
direction := "ASC"
- sb.WriteString(col.Name())
+ if col.Name().HasValue() {
+ sb.WriteString(col.Name().Value())
+ } else {
+ sb.WriteString(fmt.Sprint(col.ID()))
+ }
sb.WriteByte('_')
// we can safely assume that there is at least one field in the slice
// because we validate it before calling this function
diff --git a/db/collection_update.go b/db/collection_update.go
index 4c1895602b..f4fc1eef6e 100644
--- a/db/collection_update.go
+++ b/db/collection_update.go
@@ -402,7 +402,7 @@ func (c *collection) makeSelectionPlan(
return nil, ErrInvalidFilter
}
- f, err = c.db.parser.NewFilterFromString(c.Name(), fval)
+ f, err = c.db.parser.NewFilterFromString(c.Name().Value(), fval)
if err != nil {
return nil, err
}
@@ -432,7 +432,7 @@ func (c *collection) makeSelectionPlan(
func (c *collection) makeSelectLocal(filter immutable.Option[request.Filter]) (*request.Select, error) {
slct := &request.Select{
Field: request.Field{
- Name: c.Name(),
+ Name: c.Name().Value(),
},
Filter: filter,
Fields: make([]request.Selection, 0),
diff --git a/db/description/collection.go b/db/description/collection.go
index a334ec6384..3daeaf31de 100644
--- a/db/description/collection.go
+++ b/db/description/collection.go
@@ -39,15 +39,17 @@ func SaveCollection(
return client.CollectionDescription{}, err
}
- idBuf, err := json.Marshal(desc.ID)
- if err != nil {
- return client.CollectionDescription{}, err
- }
+ if desc.Name.HasValue() {
+ idBuf, err := json.Marshal(desc.ID)
+ if err != nil {
+ return client.CollectionDescription{}, err
+ }
- nameKey := core.NewCollectionNameKey(desc.Name)
- err = txn.Systemstore().Put(ctx, nameKey.ToDS(), idBuf)
- if err != nil {
- return client.CollectionDescription{}, err
+ nameKey := core.NewCollectionNameKey(desc.Name.Value())
+ err = txn.Systemstore().Put(ctx, nameKey.ToDS(), idBuf)
+ if err != nil {
+ return client.CollectionDescription{}, err
+ }
}
// The need for this key is temporary, we should replace it with the global collection ID
@@ -61,6 +63,26 @@ func SaveCollection(
return desc, nil
}
+func GetCollectionByID(
+ ctx context.Context,
+ txn datastore.Txn,
+ id uint32,
+) (client.CollectionDescription, error) {
+ key := core.NewCollectionKey(id)
+ buf, err := txn.Systemstore().Get(ctx, key.ToDS())
+ if err != nil {
+ return client.CollectionDescription{}, err
+ }
+
+ var col client.CollectionDescription
+ err = json.Unmarshal(buf, &col)
+ if err != nil {
+ return client.CollectionDescription{}, err
+ }
+
+ return col, nil
+}
+
// GetCollectionByName returns the collection with the given name.
//
// If no collection of that name is found, it will return an error.
@@ -81,19 +103,7 @@ func GetCollectionByName(
return client.CollectionDescription{}, err
}
- key := core.NewCollectionKey(id)
- buf, err := txn.Systemstore().Get(ctx, key.ToDS())
- if err != nil {
- return client.CollectionDescription{}, err
- }
-
- var col client.CollectionDescription
- err = json.Unmarshal(buf, &col)
- if err != nil {
- return client.CollectionDescription{}, err
- }
-
- return col, nil
+ return GetCollectionByID(ctx, txn, id)
}
// GetCollectionsBySchemaVersionID returns all collections that use the given
diff --git a/db/errors.go b/db/errors.go
index d8c9773926..9b1f414fd6 100644
--- a/db/errors.go
+++ b/db/errors.go
@@ -69,7 +69,6 @@ const (
errInvalidFieldValue string = "invalid field value"
errUnsupportedIndexFieldType string = "unsupported index field type"
errIndexDescriptionHasNoFields string = "index description has no fields"
- errIndexDescHasNonExistingField string = "index description has non existing field"
errFieldOrAliasToFieldNotExist string = "The given field or alias to field does not exist"
errCreateFile string = "failed to create file"
errRemoveFile string = "failed to remove file"
@@ -86,7 +85,7 @@ const (
errExpectedJSONArray string = "expected JSON array"
errOneOneAlreadyLinked string = "target document is already linked to another document"
errIndexDoesNotMatchName string = "the index used does not match the given name"
- errCanNotIndexNonUniqueField string = "can not index a doc's field that violates unique index"
+ errCanNotIndexNonUniqueFields string = "can not index a doc's field(s) that violates unique index"
errInvalidViewQuery string = "the query provided is not valid as a View"
)
@@ -108,6 +107,7 @@ var (
ErrExpectedJSONObject = errors.New(errExpectedJSONObject)
ErrExpectedJSONArray = errors.New(errExpectedJSONArray)
ErrInvalidViewQuery = errors.New(errInvalidViewQuery)
+ ErrCanNotIndexNonUniqueFields = errors.New(errCanNotIndexNonUniqueFields)
)
// NewErrFailedToGetHeads returns a new error indicating that the heads of a document
@@ -468,16 +468,6 @@ func NewErrIndexDescHasNoFields(desc client.IndexDescription) error {
)
}
-// NewErrIndexDescHasNonExistingField returns a new error indicating that the given index
-// description points to a field that does not exist.
-func NewErrIndexDescHasNonExistingField(desc client.IndexDescription, fieldName string) error {
- return errors.New(
- errIndexDescHasNonExistingField,
- errors.NewKV("Description", desc),
- errors.NewKV("Field name", fieldName),
- )
-}
-
// NewErrCreateFile returns a new error indicating there was a failure in creating a file.
func NewErrCreateFile(inner error, filepath string) error {
return errors.Wrap(errCreateFile, inner, errors.NewKV("Filepath", filepath))
@@ -566,13 +556,12 @@ func NewErrIndexDoesNotMatchName(index, name string) error {
)
}
-func NewErrCanNotIndexNonUniqueField(docID, fieldName string, value any) error {
- return errors.New(
- errCanNotIndexNonUniqueField,
- errors.NewKV("DocID", docID),
- errors.NewKV("Field name", fieldName),
- errors.NewKV("Field value", value),
- )
+func NewErrCanNotIndexNonUniqueFields(docID string, fieldValues ...errors.KV) error {
+ kvPairs := make([]errors.KV, 0, len(fieldValues)+1)
+ kvPairs = append(kvPairs, errors.NewKV("DocID", docID))
+ kvPairs = append(kvPairs, fieldValues...)
+
+ return errors.New(errCanNotIndexNonUniqueFields, kvPairs...)
}
func NewErrInvalidViewQueryCastFailed(query string) error {
diff --git a/db/fetcher/errors.go b/db/fetcher/errors.go
index 84d947c46f..6e4f3a6abb 100644
--- a/db/fetcher/errors.go
+++ b/db/fetcher/errors.go
@@ -26,6 +26,8 @@ const (
errVFetcherFailedToGetDagLink string = "(version fetcher) failed to get node link from DAG"
errFailedToGetDagNode string = "failed to get DAG Node"
errMissingMapper string = "missing document mapper"
+ errInvalidInOperatorValue string = "invalid _in/_nin value"
+ errInvalidIndexFilterCondition string = "invalid index filter condition"
)
var (
@@ -41,6 +43,8 @@ var (
ErrFailedToGetDagNode = errors.New(errFailedToGetDagNode)
ErrMissingMapper = errors.New(errMissingMapper)
ErrSingleSpanOnly = errors.New("spans must contain only a single entry")
+ ErrInvalidInOperatorValue = errors.New(errInvalidInOperatorValue)
+ ErrInvalidIndexFilterCondition = errors.New(errInvalidIndexFilterCondition)
)
// NewErrFieldIdNotFound returns an error indicating that the given FieldId was not found.
diff --git a/db/fetcher/indexer.go b/db/fetcher/indexer.go
index b8608e2b7d..f8cc45225a 100644
--- a/db/fetcher/indexer.go
+++ b/db/fetcher/indexer.go
@@ -23,19 +23,18 @@ import (
// IndexFetcher is a fetcher that fetches documents by index.
// It fetches only the indexed field and the rest of the fields are fetched by the internal fetcher.
type IndexFetcher struct {
- docFetcher Fetcher
- col client.Collection
- txn datastore.Txn
- indexFilter *mapper.Filter
- docFilter *mapper.Filter
- doc *encodedDocument
- mapping *core.DocumentMapping
- indexedField client.FieldDescription
- docFields []client.FieldDescription
- indexDesc client.IndexDescription
- indexIter indexIterator
- indexDataStoreKey core.IndexDataStoreKey
- execInfo ExecInfo
+ docFetcher Fetcher
+ col client.Collection
+ txn datastore.Txn
+ indexFilter *mapper.Filter
+ docFilter *mapper.Filter
+ doc *encodedDocument
+ mapping *core.DocumentMapping
+ indexedFields []client.FieldDescription
+ docFields []client.FieldDescription
+ indexDesc client.IndexDescription
+ indexIter indexIterator
+ execInfo ExecInfo
}
var _ Fetcher = (*IndexFetcher)(nil)
@@ -43,13 +42,13 @@ var _ Fetcher = (*IndexFetcher)(nil)
// NewIndexFetcher creates a new IndexFetcher.
func NewIndexFetcher(
docFetcher Fetcher,
- indexedFieldDesc client.FieldDescription,
+ indexDesc client.IndexDescription,
indexFilter *mapper.Filter,
) *IndexFetcher {
return &IndexFetcher{
- docFetcher: docFetcher,
- indexedField: indexedFieldDesc,
- indexFilter: indexFilter,
+ docFetcher: docFetcher,
+ indexDesc: indexDesc,
+ indexFilter: indexFilter,
}
}
@@ -69,24 +68,27 @@ func (f *IndexFetcher) Init(
f.mapping = docMapper
f.txn = txn
- for _, index := range col.Description().Indexes {
- if index.Fields[0].Name == f.indexedField.Name {
- f.indexDesc = index
- f.indexDataStoreKey.IndexID = index.ID
- break
+ for _, indexedField := range f.indexDesc.Fields {
+ for _, field := range f.col.Schema().Fields {
+ if field.Name == indexedField.Name {
+ f.indexedFields = append(f.indexedFields, field)
+ break
+ }
}
}
- f.indexDataStoreKey.CollectionID = f.col.ID()
-
+ f.docFields = make([]client.FieldDescription, 0, len(fields))
+outer:
for i := range fields {
- if fields[i].Name == f.indexedField.Name {
- f.docFields = append(fields[:i], fields[i+1:]...)
- break
+ for j := range f.indexedFields {
+ if fields[i].Name == f.indexedFields[j].Name {
+ continue outer
+ }
}
+ f.docFields = append(f.docFields, fields[i])
}
- iter, err := createIndexIterator(f.indexDataStoreKey, f.indexFilter, &f.execInfo, f.indexDesc.Unique)
+ iter, err := f.createIndexIterator()
if err != nil {
return err
}
@@ -123,17 +125,21 @@ func (f *IndexFetcher) FetchNext(ctx context.Context) (EncodedDocument, ExecInfo
return nil, f.execInfo, nil
}
- property := &encProperty{
- Desc: f.indexedField,
- Raw: res.key.FieldValues[0],
+ for i, indexedField := range f.indexedFields {
+ property := &encProperty{
+ Desc: indexedField,
+ Raw: res.key.FieldValues[i],
+ }
+
+ f.doc.properties[indexedField] = property
}
if f.indexDesc.Unique {
f.doc.id = res.value
} else {
- f.doc.id = res.key.FieldValues[1]
+ f.doc.id = res.key.FieldValues[len(res.key.FieldValues)-1]
}
- f.doc.properties[f.indexedField] = property
+
f.execInfo.FieldsFetched++
if f.docFetcher != nil && len(f.docFields) > 0 {
diff --git a/db/fetcher/indexer_iterators.go b/db/fetcher/indexer_iterators.go
index aa24605559..76786f5050 100644
--- a/db/fetcher/indexer_iterators.go
+++ b/db/fetcher/indexer_iterators.go
@@ -39,6 +39,12 @@ const (
opNin = "_nin"
opLike = "_like"
opNlike = "_nlike"
+ // it's just there for composite indexes. We construct a slice of value matchers with
+ // every matcher being responsible for a corresponding field in the index to match.
+ // For some fields there might not be any criteria to match. For examples if you have
+ // composite index of /name/age/email/ and in the filter you specify only "name" and "email".
+ // Then the "_any" matcher will be used for "age".
+ opAny = "_any"
)
// indexIterator is an iterator over index keys.
@@ -80,15 +86,16 @@ func (i *queryResultIterator) Close() error {
}
type eqPrefixIndexIterator struct {
- filterValueHolder
- indexKey core.IndexDataStoreKey
- execInfo *ExecInfo
+ indexKey core.IndexDataStoreKey
+ keyFieldValue []byte
+ execInfo *ExecInfo
+ matchers []valueMatcher
queryResultIterator
}
func (i *eqPrefixIndexIterator) Init(ctx context.Context, store datastore.DSReaderWriter) error {
- i.indexKey.FieldValues = [][]byte{i.value}
+ i.indexKey.FieldValues = [][]byte{i.keyFieldValue}
resultIter, err := store.Query(ctx, query.Query{
Prefix: i.indexKey.ToString(),
})
@@ -100,35 +107,36 @@ func (i *eqPrefixIndexIterator) Init(ctx context.Context, store datastore.DSRead
}
func (i *eqPrefixIndexIterator) Next() (indexIterResult, error) {
- res, err := i.queryResultIterator.Next()
- if res.foundKey {
+ for {
+ res, err := i.queryResultIterator.Next()
+ if err != nil || !res.foundKey {
+ return res, err
+ }
i.execInfo.IndexesFetched++
+ doesMatch, err := executeValueMatchers(i.matchers, res.key.FieldValues)
+ if err != nil {
+ return indexIterResult{}, err
+ }
+ if !doesMatch {
+ continue
+ }
+ return res, err
}
- return res, err
-}
-
-type filterValueIndexIterator interface {
- indexIterator
- SetFilterValue([]byte)
-}
-
-type filterValueHolder struct {
- value []byte
-}
-
-func (h *filterValueHolder) SetFilterValue(value []byte) {
- h.value = value
}
type eqSingleIndexIterator struct {
- filterValueHolder
- indexKey core.IndexDataStoreKey
- execInfo *ExecInfo
+ indexKey core.IndexDataStoreKey
+ keyFieldValues [][]byte
+ execInfo *ExecInfo
ctx context.Context
store datastore.DSReaderWriter
}
+func (i *eqSingleIndexIterator) SetKeyFieldValue(value []byte) {
+ i.keyFieldValues = [][]byte{value}
+}
+
func (i *eqSingleIndexIterator) Init(ctx context.Context, store datastore.DSReaderWriter) error {
i.ctx = ctx
i.store = store
@@ -139,7 +147,7 @@ func (i *eqSingleIndexIterator) Next() (indexIterResult, error) {
if i.store == nil {
return indexIterResult{}, nil
}
- i.indexKey.FieldValues = [][]byte{i.value}
+ i.indexKey.FieldValues = i.keyFieldValues
val, err := i.store.Get(i.ctx, i.indexKey.ToDS())
if err != nil {
if errors.Is(err, ds.ErrNotFound) {
@@ -157,38 +165,33 @@ func (i *eqSingleIndexIterator) Close() error {
}
type inIndexIterator struct {
- filterValueIndexIterator
- filterValues [][]byte
- nextValIndex int
- ctx context.Context
- store datastore.DSReaderWriter
- hasIterator bool
-}
-
-func newInIndexIterator(
- indexIter filterValueIndexIterator,
- filterValues [][]byte,
-) *inIndexIterator {
- return &inIndexIterator{
- filterValueIndexIterator: indexIter,
- filterValues: filterValues,
- }
+ indexIterator
+ keyFieldValues [][]byte
+ nextValIndex int
+ ctx context.Context
+ store datastore.DSReaderWriter
+ hasIterator bool
}
func (i *inIndexIterator) nextIterator() (bool, error) {
if i.nextValIndex > 0 {
- err := i.filterValueIndexIterator.Close()
+ err := i.indexIterator.Close()
if err != nil {
return false, err
}
}
- if i.nextValIndex >= len(i.filterValues) {
+ if i.nextValIndex >= len(i.keyFieldValues) {
return false, nil
}
- i.SetFilterValue(i.filterValues[i.nextValIndex])
- err := i.filterValueIndexIterator.Init(i.ctx, i.store)
+ switch fieldIter := i.indexIterator.(type) {
+ case *eqPrefixIndexIterator:
+ fieldIter.keyFieldValue = i.keyFieldValues[i.nextValIndex]
+ case *eqSingleIndexIterator:
+ fieldIter.keyFieldValues[0] = i.keyFieldValues[i.nextValIndex]
+ }
+ err := i.indexIterator.Init(i.ctx, i.store)
if err != nil {
return false, err
}
@@ -206,7 +209,7 @@ func (i *inIndexIterator) Init(ctx context.Context, store datastore.DSReaderWrit
func (i *inIndexIterator) Next() (indexIterResult, error) {
for i.hasIterator {
- res, err := i.filterValueIndexIterator.Next()
+ res, err := i.indexIterator.Next()
if err != nil {
return indexIterResult{}, err
}
@@ -227,49 +230,52 @@ func (i *inIndexIterator) Close() error {
}
type errorCheckingFilter struct {
- matcher indexMatcher
- err error
+ matchers []valueMatcher
+ err error
+ execInfo *ExecInfo
+}
+
+func executeValueMatchers(matchers []valueMatcher, values [][]byte) (bool, error) {
+ for i := range matchers {
+ res, err := matchers[i].Match(values[i])
+ if err != nil {
+ return false, err
+ }
+ if !res {
+ return false, nil
+ }
+ }
+ return true, nil
}
func (f *errorCheckingFilter) Filter(e query.Entry) bool {
if f.err != nil {
return false
}
+ f.execInfo.IndexesFetched++
+
indexKey, err := core.NewIndexDataStoreKey(e.Key)
if err != nil {
f.err = err
return false
}
- res, err := f.matcher.Match(indexKey)
- if err != nil {
- f.err = err
- return false
- }
- return res
-}
-// execInfoIndexMatcherDecorator is a decorator for indexMatcher that counts the number
-// of indexes fetched on every call to Match.
-type execInfoIndexMatcherDecorator struct {
- matcher indexMatcher
- execInfo *ExecInfo
-}
-
-func (d *execInfoIndexMatcherDecorator) Match(key core.IndexDataStoreKey) (bool, error) {
- d.execInfo.IndexesFetched++
- return d.matcher.Match(key)
+ var res bool
+ res, f.err = executeValueMatchers(f.matchers, indexKey.FieldValues)
+ return res
}
type scanningIndexIterator struct {
queryResultIterator
indexKey core.IndexDataStoreKey
- matcher indexMatcher
+ matchers []valueMatcher
filter errorCheckingFilter
execInfo *ExecInfo
}
func (i *scanningIndexIterator) Init(ctx context.Context, store datastore.DSReaderWriter) error {
- i.filter.matcher = &execInfoIndexMatcherDecorator{matcher: i.matcher, execInfo: i.execInfo}
+ i.filter.matchers = i.matchers
+ i.filter.execInfo = i.execInfo
iter, err := store.Query(ctx, query.Query{
Prefix: i.indexKey.ToString(),
@@ -291,9 +297,9 @@ func (i *scanningIndexIterator) Next() (indexIterResult, error) {
return res, err
}
-// checks if the stored index value satisfies the condition
-type indexMatcher interface {
- Match(core.IndexDataStoreKey) (bool, error)
+// checks if the value satisfies the condition
+type valueMatcher interface {
+ Match([]byte) (bool, error)
}
// indexByteValuesMatcher is a filter that compares the index value with a given value.
@@ -304,20 +310,11 @@ type indexByteValuesMatcher struct {
evalFunc func(int) bool
}
-func (m *indexByteValuesMatcher) Match(key core.IndexDataStoreKey) (bool, error) {
- res := bytes.Compare(key.FieldValues[0], m.value)
+func (m *indexByteValuesMatcher) Match(value []byte) (bool, error) {
+ res := bytes.Compare(value, m.value)
return m.evalFunc(res), nil
}
-// matcher if _ne condition is met
-type neIndexMatcher struct {
- value []byte
-}
-
-func (m *neIndexMatcher) Match(key core.IndexDataStoreKey) (bool, error) {
- return !bytes.Equal(key.FieldValues[0], m.value), nil
-}
-
// checks if the index value is or is not in the given array
type indexInArrayMatcher struct {
values map[string]bool
@@ -332,8 +329,8 @@ func newNinIndexCmp(values [][]byte, isIn bool) *indexInArrayMatcher {
return &indexInArrayMatcher{values: valuesMap, isIn: isIn}
}
-func (m *indexInArrayMatcher) Match(key core.IndexDataStoreKey) (bool, error) {
- _, found := m.values[string(key.FieldValues[0])]
+func (m *indexInArrayMatcher) Match(value []byte) (bool, error) {
+ _, found := m.values[string(value)]
return found == m.isIn, nil
}
@@ -368,9 +365,9 @@ func newLikeIndexCmp(filterValue string, isLike bool) *indexLikeMatcher {
return matcher
}
-func (m *indexLikeMatcher) Match(key core.IndexDataStoreKey) (bool, error) {
+func (m *indexLikeMatcher) Match(value []byte) (bool, error) {
var currentVal string
- err := cbor.Unmarshal(key.FieldValues[0], &currentVal)
+ err := cbor.Unmarshal(value, &currentVal)
if err != nil {
return false, err
}
@@ -395,25 +392,11 @@ func (m *indexLikeMatcher) doesMatch(currentVal string) bool {
}
}
-func createIndexIterator(
- indexDataStoreKey core.IndexDataStoreKey,
- indexFilterConditions *mapper.Filter,
- execInfo *ExecInfo,
- isUnique bool,
-) (indexIterator, error) {
- var op string
- var filterVal any
- for _, indexFilterCond := range indexFilterConditions.Conditions {
- condMap := indexFilterCond.(map[connor.FilterKey]any)
- var key connor.FilterKey
- for key, filterVal = range condMap {
- break
- }
- opKey := key.(*mapper.Operator)
- op = opKey.Operation
- break
- }
+type anyMatcher struct{}
+func (m *anyMatcher) Match([]byte) (bool, error) { return true, nil }
+
+func createValueMatcher(op string, filterVal any) (valueMatcher, error) {
switch op {
case opEq, opGt, opGe, opLt, opLe, opNe:
fieldValue := client.NewFieldValue(client.LWW_REGISTER, filterVal)
@@ -423,74 +406,26 @@ func createIndexIterator(
return nil, err
}
+ m := &indexByteValuesMatcher{value: valueBytes}
switch op {
case opEq:
- if isUnique {
- return &eqSingleIndexIterator{
- indexKey: indexDataStoreKey,
- filterValueHolder: filterValueHolder{
- value: valueBytes,
- },
- execInfo: execInfo,
- }, nil
- } else {
- return &eqPrefixIndexIterator{
- indexKey: indexDataStoreKey,
- filterValueHolder: filterValueHolder{
- value: valueBytes,
- },
- execInfo: execInfo,
- }, nil
- }
+ m.evalFunc = func(res int) bool { return res == 0 }
case opGt:
- return &scanningIndexIterator{
- indexKey: indexDataStoreKey,
- matcher: &indexByteValuesMatcher{
- value: valueBytes,
- evalFunc: func(res int) bool { return res > 0 },
- },
- execInfo: execInfo,
- }, nil
+ m.evalFunc = func(res int) bool { return res > 0 }
case opGe:
- return &scanningIndexIterator{
- indexKey: indexDataStoreKey,
- matcher: &indexByteValuesMatcher{
- value: valueBytes,
- evalFunc: func(res int) bool { return res > 0 || res == 0 },
- },
- execInfo: execInfo,
- }, nil
+ m.evalFunc = func(res int) bool { return res > 0 || res == 0 }
case opLt:
- return &scanningIndexIterator{
- indexKey: indexDataStoreKey,
- matcher: &indexByteValuesMatcher{
- value: valueBytes,
- evalFunc: func(res int) bool { return res < 0 },
- },
- execInfo: execInfo,
- }, nil
+ m.evalFunc = func(res int) bool { return res < 0 }
case opLe:
- return &scanningIndexIterator{
- indexKey: indexDataStoreKey,
- matcher: &indexByteValuesMatcher{
- value: valueBytes,
- evalFunc: func(res int) bool { return res < 0 || res == 0 },
- },
- execInfo: execInfo,
- }, nil
+ m.evalFunc = func(res int) bool { return res < 0 || res == 0 }
case opNe:
- return &scanningIndexIterator{
- indexKey: indexDataStoreKey,
- matcher: &neIndexMatcher{
- value: valueBytes,
- },
- execInfo: execInfo,
- }, nil
+ m.evalFunc = func(res int) bool { return res != 0 }
}
+ return m, nil
case opIn, opNin:
inArr, ok := filterVal.([]any)
if !ok {
- return nil, errors.New("invalid _in/_nin value")
+ return nil, ErrInvalidInOperatorValue
}
valArr := make([][]byte, 0, len(inArr))
for _, v := range inArr {
@@ -501,38 +436,181 @@ func createIndexIterator(
}
valArr = append(valArr, valueBytes)
}
- if op == opIn {
- var iter filterValueIndexIterator
- if isUnique {
- iter = &eqSingleIndexIterator{
- indexKey: indexDataStoreKey,
- execInfo: execInfo,
- }
- } else {
- iter = &eqPrefixIndexIterator{
- indexKey: indexDataStoreKey,
- execInfo: execInfo,
- }
+ return newNinIndexCmp(valArr, op == opIn), nil
+ case opLike, opNlike:
+ return newLikeIndexCmp(filterVal.(string), op == opLike), nil
+ case opAny:
+ return &anyMatcher{}, nil
+ }
+
+ return nil, ErrInvalidIndexFilterCondition
+}
+
+func createValueMatchers(conditions []fieldFilterCond) ([]valueMatcher, error) {
+ matchers := make([]valueMatcher, 0, len(conditions))
+ for i := range conditions {
+ m, err := createValueMatcher(conditions[i].op, conditions[i].val)
+ if err != nil {
+ return nil, err
+ }
+ matchers = append(matchers, m)
+ }
+ return matchers, nil
+}
+
+type fieldFilterCond struct {
+ op string
+ val any
+}
+
+func (f *IndexFetcher) determineFieldFilterConditions() []fieldFilterCond {
+ result := make([]fieldFilterCond, 0, len(f.indexedFields))
+ for i := range f.indexedFields {
+ fieldInd := f.mapping.FirstIndexOfName(f.indexedFields[i].Name)
+ found := false
+ // iterate through conditions and find the one that matches the current field
+ for filterKey, indexFilterCond := range f.indexFilter.Conditions {
+ propKey, ok := filterKey.(*mapper.PropertyIndex)
+ if !ok || fieldInd != propKey.Index {
+ continue
+ }
+
+ found = true
+
+ condMap := indexFilterCond.(map[connor.FilterKey]any)
+ for key, filterVal := range condMap {
+ opKey := key.(*mapper.Operator)
+ result = append(result, fieldFilterCond{op: opKey.Operation, val: filterVal})
+ break
+ }
+ break
+ }
+ if !found {
+ result = append(result, fieldFilterCond{op: opAny})
+ }
+ }
+ return result
+}
+
+// isUniqueFetchByFullKey checks if a single index entry can be fetched directly by the full index key.
+//
+// This method ignores the first condition because it's expected to be called only
+// when the first field is used as a prefix in the index key. So we only check if the
+// rest of the conditions are _eq.
+func isUniqueFetchByFullKey(indexDesc *client.IndexDescription, conditions []fieldFilterCond) bool {
+ // we need to check length of conditions because full key fetch is only possible
+ // if all fields are specified in the filter
+ res := indexDesc.Unique && len(conditions) == len(indexDesc.Fields)
+ for i := 1; i < len(conditions); i++ {
+ res = res && conditions[i].op == opEq
+ }
+ return res
+}
+
+func getFieldsBytes(conditions []fieldFilterCond) ([][]byte, error) {
+ result := make([][]byte, 0, len(conditions))
+ for i := range conditions {
+ fieldVal := client.NewFieldValue(client.LWW_REGISTER, conditions[i].val)
+ keyFieldBytes, err := fieldVal.Bytes()
+ if err != nil {
+ return nil, err
+ }
+ result = append(result, keyFieldBytes)
+ }
+ return result, nil
+}
+
+func (f *IndexFetcher) createIndexIterator() (indexIterator, error) {
+ fieldConditions := f.determineFieldFilterConditions()
+ indexDataStoreKey := core.IndexDataStoreKey{CollectionID: f.col.ID(), IndexID: f.indexDesc.ID}
+
+ matchers, err := createValueMatchers(fieldConditions)
+ if err != nil {
+ return nil, err
+ }
+
+ switch fieldConditions[0].op {
+ case opEq:
+ if isUniqueFetchByFullKey(&f.indexDesc, fieldConditions) {
+ keyFieldsBytes, err := getFieldsBytes(fieldConditions)
+ if err != nil {
+ return nil, err
}
- return newInIndexIterator(iter, valArr), nil
+ return &eqSingleIndexIterator{
+ indexKey: indexDataStoreKey,
+ keyFieldValues: keyFieldsBytes,
+ execInfo: &f.execInfo,
+ }, nil
} else {
- return &scanningIndexIterator{
- indexKey: indexDataStoreKey,
- matcher: newNinIndexCmp(valArr, false),
- execInfo: execInfo,
+ fieldVal := client.NewFieldValue(client.LWW_REGISTER, fieldConditions[0].val)
+
+ keyValueBytes, err := fieldVal.Bytes()
+ if err != nil {
+ return nil, err
+ }
+
+ // iterators for _eq filter already iterate over keys with first field value
+ // matching the filter value, so we can skip the first matcher
+ if len(matchers) > 1 {
+ matchers[0] = &anyMatcher{}
+ }
+
+ return &eqPrefixIndexIterator{
+ indexKey: indexDataStoreKey,
+ keyFieldValue: keyValueBytes,
+ execInfo: &f.execInfo,
+ matchers: matchers,
}, nil
}
- case opLike:
- return &scanningIndexIterator{
- indexKey: indexDataStoreKey,
- matcher: newLikeIndexCmp(filterVal.(string), true),
- execInfo: execInfo,
+ case opIn:
+ inArr, ok := fieldConditions[0].val.([]any)
+ if !ok {
+ return nil, ErrInvalidInOperatorValue
+ }
+ keyFieldArr := make([][]byte, 0, len(inArr))
+ for _, v := range inArr {
+ fieldVal := client.NewFieldValue(client.LWW_REGISTER, v)
+ keyFieldBytes, err := fieldVal.Bytes()
+ if err != nil {
+ return nil, err
+ }
+ keyFieldArr = append(keyFieldArr, keyFieldBytes)
+ }
+
+ // iterators for _in filter already iterate over keys with first field value
+ // matching the filter value, so we can skip the first matcher
+ if len(matchers) > 1 {
+ matchers[0] = &anyMatcher{}
+ }
+
+ var iter indexIterator
+ if isUniqueFetchByFullKey(&f.indexDesc, fieldConditions) {
+ restFieldsVals, e := getFieldsBytes(fieldConditions[1:])
+ if e != nil {
+ return nil, e
+ }
+ restFieldsVals = append([][]byte{{}}, restFieldsVals...)
+ iter = &eqSingleIndexIterator{
+ indexKey: indexDataStoreKey,
+ execInfo: &f.execInfo,
+ keyFieldValues: restFieldsVals,
+ }
+ } else {
+ iter = &eqPrefixIndexIterator{
+ indexKey: indexDataStoreKey,
+ execInfo: &f.execInfo,
+ matchers: matchers,
+ }
+ }
+ return &inIndexIterator{
+ indexIterator: iter,
+ keyFieldValues: keyFieldArr,
}, nil
- case opNlike:
+ case opGt, opGe, opLt, opLe, opNe, opNin, opLike, opNlike:
return &scanningIndexIterator{
indexKey: indexDataStoreKey,
- matcher: newLikeIndexCmp(filterVal.(string), false),
- execInfo: execInfo,
+ matchers: matchers,
+ execInfo: &f.execInfo,
}, nil
}
diff --git a/db/index.go b/db/index.go
index 59fd25eaa9..d76573de65 100644
--- a/db/index.go
+++ b/db/index.go
@@ -90,16 +90,20 @@ func NewCollectionIndex(
if len(desc.Fields) == 0 {
return nil, NewErrIndexDescHasNoFields(desc)
}
- field, foundField := collection.Schema().GetField(desc.Fields[0].Name)
- if !foundField {
- return nil, NewErrIndexDescHasNonExistingField(desc, desc.Fields[0].Name)
- }
base := collectionBaseIndex{collection: collection, desc: desc}
- base.fieldDesc = field
- var err error
- base.validateFieldFunc, err = getFieldValidateFunc(field.Kind)
- if err != nil {
- return nil, err
+ base.validateFieldFuncs = make([]func(any) bool, 0, len(desc.Fields))
+ base.fieldsDescs = make([]client.FieldDescription, 0, len(desc.Fields))
+ for _, fieldDesc := range desc.Fields {
+ field, foundField := collection.Schema().GetField(fieldDesc.Name)
+ if !foundField {
+ return nil, client.NewErrFieldNotExist(fieldDesc.Name)
+ }
+ base.fieldsDescs = append(base.fieldsDescs, field)
+ validateFunc, err := getFieldValidateFunc(field.Kind)
+ if err != nil {
+ return nil, err
+ }
+ base.validateFieldFuncs = append(base.validateFieldFuncs, validateFunc)
}
if desc.Unique {
return &collectionUniqueIndex{collectionBaseIndex: base}, nil
@@ -109,34 +113,43 @@ func NewCollectionIndex(
}
type collectionBaseIndex struct {
- collection client.Collection
- desc client.IndexDescription
- validateFieldFunc func(any) bool
- fieldDesc client.FieldDescription
+ collection client.Collection
+ desc client.IndexDescription
+ validateFieldFuncs []func(any) bool
+ fieldsDescs []client.FieldDescription
}
-func (i *collectionBaseIndex) getDocFieldValue(doc *client.Document) ([]byte, error) {
- // collectionSimpleIndex only supports single field indexes, that's why we
- // can safely access the first field
- indexedFieldName := i.desc.Fields[0].Name
- fieldVal, err := doc.GetValue(indexedFieldName)
- if err != nil {
- if errors.Is(err, client.ErrFieldNotExist) {
- return client.NewFieldValue(client.LWW_REGISTER, nil).Bytes()
- } else {
+func (i *collectionBaseIndex) getDocFieldValue(doc *client.Document) ([][]byte, error) {
+ result := make([][]byte, 0, len(i.fieldsDescs))
+ for iter := range i.fieldsDescs {
+ fieldVal, err := doc.GetValue(i.fieldsDescs[iter].Name)
+ if err != nil {
+ if errors.Is(err, client.ErrFieldNotExist) {
+ valBytes, err := client.NewFieldValue(client.LWW_REGISTER, nil).Bytes()
+ if err != nil {
+ return nil, err
+ }
+ result = append(result, valBytes)
+ continue
+ }
return nil, err
}
+ if !i.validateFieldFuncs[iter](fieldVal.Value()) {
+ return nil, NewErrInvalidFieldValue(i.fieldsDescs[iter].Kind, fieldVal)
+ }
+ valBytes, err := fieldVal.Bytes()
+ if err != nil {
+ return nil, err
+ }
+ result = append(result, valBytes)
}
- if !i.validateFieldFunc(fieldVal.Value()) {
- return nil, NewErrInvalidFieldValue(i.fieldDesc.Kind, fieldVal)
- }
- return fieldVal.Bytes()
+ return result, nil
}
func (i *collectionBaseIndex) getDocumentsIndexKey(
doc *client.Document,
) (core.IndexDataStoreKey, error) {
- fieldValue, err := i.getDocFieldValue(doc)
+ fieldValues, err := i.getDocFieldValue(doc)
if err != nil {
return core.IndexDataStoreKey{}, err
}
@@ -144,7 +157,7 @@ func (i *collectionBaseIndex) getDocumentsIndexKey(
indexDataStoreKey := core.IndexDataStoreKey{}
indexDataStoreKey.CollectionID = i.collection.ID()
indexDataStoreKey.IndexID = i.desc.ID
- indexDataStoreKey.FieldValues = [][]byte{fieldValue}
+ indexDataStoreKey.FieldValues = fieldValues
return indexDataStoreKey, nil
}
@@ -289,19 +302,23 @@ func (i *collectionUniqueIndex) Save(
func (i *collectionUniqueIndex) newUniqueIndexError(
doc *client.Document,
) error {
- fieldVal, err := doc.GetValue(i.fieldDesc.Name)
- var val any
- if err != nil {
- // If the error is ErrFieldNotExist, we leave `val` as is (e.g. nil)
- // otherwise we return the error
- if !errors.Is(err, client.ErrFieldNotExist) {
- return err
+ kvs := make([]errors.KV, 0, len(i.fieldsDescs))
+ for iter := range i.fieldsDescs {
+ fieldVal, err := doc.GetValue(i.fieldsDescs[iter].Name)
+ var val any
+ if err != nil {
+ // If the error is ErrFieldNotExist, we leave `val` as is (e.g. nil)
+ // otherwise we return the error
+ if !errors.Is(err, client.ErrFieldNotExist) {
+ return err
+ }
+ } else {
+ val = fieldVal.Value()
}
- } else {
- val = fieldVal.Value()
+ kvs = append(kvs, errors.NewKV(i.fieldsDescs[iter].Name, val))
}
- return NewErrCanNotIndexNonUniqueField(doc.ID().String(), i.fieldDesc.Name, val)
+ return NewErrCanNotIndexNonUniqueFields(doc.ID().String(), kvs...)
}
func (i *collectionUniqueIndex) Update(
diff --git a/db/index_test.go b/db/index_test.go
index 911228e649..ba7d62e8de 100644
--- a/db/index_test.go
+++ b/db/index_test.go
@@ -20,6 +20,7 @@ import (
ds "github.com/ipfs/go-datastore"
"github.com/ipfs/go-datastore/query"
+ "github.com/sourcenetwork/immutable"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
@@ -59,6 +60,10 @@ type indexTestFixture struct {
}
func (f *indexTestFixture) addUsersCollection() client.Collection {
+ if f.users != nil {
+ return f.users
+ }
+
_, err := f.db.AddSchema(
f.ctx,
fmt.Sprintf(
@@ -136,7 +141,7 @@ func newIndexTestFixture(t *testing.T) *indexTestFixture {
func (f *indexTestFixture) createCollectionIndex(
desc client.IndexDescription,
) (client.IndexDescription, error) {
- return f.createCollectionIndexFor(f.users.Name(), desc)
+ return f.createCollectionIndexFor(f.users.Name().Value(), desc)
}
func getUsersIndexDescOnName() client.IndexDescription {
@@ -176,7 +181,7 @@ func getProductsIndexDescOnCategory() client.IndexDescription {
}
func (f *indexTestFixture) createUserCollectionIndexOnName() client.IndexDescription {
- newDesc, err := f.createCollectionIndexFor(f.users.Name(), getUsersIndexDescOnName())
+ newDesc, err := f.createCollectionIndexFor(f.users.Name().Value(), getUsersIndexDescOnName())
require.NoError(f.t, err)
return newDesc
}
@@ -188,13 +193,27 @@ func makeUnique(indexDesc client.IndexDescription) client.IndexDescription {
func (f *indexTestFixture) createUserCollectionUniqueIndexOnName() client.IndexDescription {
indexDesc := makeUnique(getUsersIndexDescOnName())
- newDesc, err := f.createCollectionIndexFor(f.users.Name(), indexDesc)
+ newDesc, err := f.createCollectionIndexFor(f.users.Name().Value(), indexDesc)
+ require.NoError(f.t, err)
+ return newDesc
+}
+
+func addFieldToIndex(indexDesc client.IndexDescription, fieldName string) client.IndexDescription {
+ indexDesc.Fields = append(indexDesc.Fields, client.IndexedFieldDescription{
+ Name: fieldName, Direction: client.Ascending,
+ })
+ return indexDesc
+}
+
+func (f *indexTestFixture) createUserCollectionIndexOnNameAndAge() client.IndexDescription {
+ indexDesc := addFieldToIndex(getUsersIndexDescOnName(), usersAgeFieldName)
+ newDesc, err := f.createCollectionIndexFor(f.users.Name().Value(), indexDesc)
require.NoError(f.t, err)
return newDesc
}
func (f *indexTestFixture) createUserCollectionIndexOnAge() client.IndexDescription {
- newDesc, err := f.createCollectionIndexFor(f.users.Name(), getUsersIndexDescOnAge())
+ newDesc, err := f.createCollectionIndexFor(f.users.Name().Value(), getUsersIndexDescOnAge())
require.NoError(f.t, err)
return newDesc
}
@@ -204,7 +223,7 @@ func (f *indexTestFixture) dropIndex(colName, indexName string) error {
}
func (f *indexTestFixture) countIndexPrefixes(colName, indexName string) int {
- prefix := core.NewCollectionIndexKey(usersColName, indexName)
+ prefix := core.NewCollectionIndexKey(immutable.Some(f.users.ID()), indexName)
q, err := f.txn.Systemstore().Query(f.ctx, query.Query{
Prefix: prefix.ToString(),
})
@@ -247,8 +266,8 @@ func (f *indexTestFixture) getAllIndexes() (map[client.CollectionName][]client.I
return f.db.getAllIndexes(f.ctx, f.txn)
}
-func (f *indexTestFixture) getCollectionIndexes(colName string) ([]client.IndexDescription, error) {
- return f.db.fetchCollectionIndexDescriptions(f.ctx, f.txn, colName)
+func (f *indexTestFixture) getCollectionIndexes(colID uint32) ([]client.IndexDescription, error) {
+ return f.db.fetchCollectionIndexDescriptions(f.ctx, f.txn, colID)
}
func TestCreateIndex_IfFieldsIsEmpty_ReturnError(t *testing.T) {
@@ -392,7 +411,7 @@ func TestCreateIndex_ShouldSaveToSystemStorage(t *testing.T) {
_, err := f.createCollectionIndex(desc)
assert.NoError(t, err)
- key := core.NewCollectionIndexKey(f.users.Name(), name)
+ key := core.NewCollectionIndexKey(immutable.Some(f.users.ID()), name)
data, err := f.txn.Systemstore().Get(f.ctx, key.ToDS())
assert.NoError(t, err)
var deserialized client.IndexDescription
@@ -441,7 +460,7 @@ func TestCreateIndex_WithMultipleCollectionsAndIndexes_AssignIncrementedIDPerCol
}
createIndexAndAssert := func(col client.Collection, fieldName string, expectedID uint32) {
- desc, err := f.createCollectionIndexFor(col.Name(), makeIndex(fieldName))
+ desc, err := f.createCollectionIndexFor(col.Name().Value(), makeIndex(fieldName))
require.NoError(t, err)
assert.Equal(t, expectedID, desc.ID)
seqKey := core.NewSequenceKey(fmt.Sprintf("%s/%d", core.COLLECTION_INDEX, col.ID()))
@@ -524,7 +543,7 @@ func TestCreateIndex_IfAttemptToIndexOnUnsupportedType_ReturnError(t *testing.T)
f.txn, err = f.db.NewTxn(f.ctx, false)
require.NoError(f.t, err)
- _, err = f.createCollectionIndexFor(collection.Name(), indexDesc)
+ _, err = f.createCollectionIndexFor(collection.Name().Value(), indexDesc)
require.ErrorIs(f.t, err, NewErrUnsupportedIndexFieldType(unsupportedKind))
}
@@ -562,7 +581,7 @@ func TestGetIndexes_IfInvalidIndexIsStored_ReturnError(t *testing.T) {
f := newIndexTestFixture(t)
defer f.db.Close()
- indexKey := core.NewCollectionIndexKey(usersColName, "users_name_index")
+ indexKey := core.NewCollectionIndexKey(immutable.Some(f.users.ID()), "users_name_index")
err := f.txn.Systemstore().Put(f.ctx, indexKey.ToDS(), []byte("invalid"))
assert.NoError(t, err)
@@ -574,7 +593,7 @@ func TestGetIndexes_IfInvalidIndexKeyIsStored_ReturnError(t *testing.T) {
f := newIndexTestFixture(t)
defer f.db.Close()
- indexKey := core.NewCollectionIndexKey(usersColName, "users_name_index")
+ indexKey := core.NewCollectionIndexKey(immutable.Some(f.users.ID()), "users_name_index")
key := ds.NewKey(indexKey.ToString() + "/invalid")
desc := client.IndexDescription{
Name: "some_index_name",
@@ -663,7 +682,7 @@ func TestGetCollectionIndexes_ShouldReturnListOfCollectionIndexes(t *testing.T)
_, err := f.createCollectionIndexFor(usersColName, usersIndexDesc)
assert.NoError(t, err)
- f.getProductsCollectionDesc()
+ products := f.getProductsCollectionDesc()
productsIndexDesc := client.IndexDescription{
Name: "products_description_index",
Fields: []client.IndexedFieldDescription{{Name: productsPriceFieldName}},
@@ -675,13 +694,13 @@ func TestGetCollectionIndexes_ShouldReturnListOfCollectionIndexes(t *testing.T)
_, err = f.createCollectionIndexFor(productsColName, productsIndexDesc)
assert.NoError(t, err)
- userIndexes, err := f.getCollectionIndexes(usersColName)
+ userIndexes, err := f.getCollectionIndexes(f.users.ID())
assert.NoError(t, err)
require.Equal(t, 1, len(userIndexes))
usersIndexDesc.ID = 1
assert.Equal(t, usersIndexDesc, userIndexes[0])
- productIndexes, err := f.getCollectionIndexes(productsColName)
+ productIndexes, err := f.getCollectionIndexes(products.ID())
assert.NoError(t, err)
require.Equal(t, 1, len(productIndexes))
productsIndexDesc.ID = 1
@@ -700,7 +719,7 @@ func TestGetCollectionIndexes_IfSystemStoreFails_ReturnError(t *testing.T) {
mockedTxn.EXPECT().Systemstore().Unset()
mockedTxn.EXPECT().Systemstore().Return(mockedTxn.MockSystemstore)
- _, err := f.getCollectionIndexes(usersColName)
+ _, err := f.getCollectionIndexes(f.users.ID())
assert.ErrorIs(t, err, testErr)
}
@@ -716,7 +735,7 @@ func TestGetCollectionIndexes_IfSystemStoreFails_ShouldCloseIterator(t *testing.
mockedTxn.EXPECT().Systemstore().Unset()
mockedTxn.EXPECT().Systemstore().Return(mockedTxn.MockSystemstore)
- _, _ = f.getCollectionIndexes(usersColName)
+ _, _ = f.getCollectionIndexes(f.users.ID())
}
func TestGetCollectionIndexes_IfSystemStoreQueryIteratorFails_ReturnError(t *testing.T) {
@@ -732,7 +751,7 @@ func TestGetCollectionIndexes_IfSystemStoreQueryIteratorFails_ReturnError(t *tes
mockedTxn.EXPECT().Systemstore().Unset()
mockedTxn.EXPECT().Systemstore().Return(mockedTxn.MockSystemstore)
- _, err := f.getCollectionIndexes(usersColName)
+ _, err := f.getCollectionIndexes(f.users.ID())
assert.ErrorIs(t, err, testErr)
}
@@ -740,11 +759,11 @@ func TestGetCollectionIndexes_IfInvalidIndexIsStored_ReturnError(t *testing.T) {
f := newIndexTestFixture(t)
defer f.db.Close()
- indexKey := core.NewCollectionIndexKey(usersColName, "users_name_index")
+ indexKey := core.NewCollectionIndexKey(immutable.Some(f.users.ID()), "users_name_index")
err := f.txn.Systemstore().Put(f.ctx, indexKey.ToDS(), []byte("invalid"))
assert.NoError(t, err)
- _, err = f.getCollectionIndexes(usersColName)
+ _, err = f.getCollectionIndexes(f.users.ID())
assert.ErrorIs(t, err, datastore.NewErrInvalidStoredValue(nil))
}
@@ -866,7 +885,6 @@ func TestCollectionGetIndexes_IfFailsToCreateTxn_ShouldNotCache(t *testing.T) {
func TestCollectionGetIndexes_IfStoredIndexWithUnsupportedType_ReturnError(t *testing.T) {
f := newIndexTestFixtureBare(t)
- f.addUsersCollection()
const unsupportedKind = client.FieldKind_BOOL_ARRAY
_, err := f.db.AddSchema(
@@ -1011,7 +1029,7 @@ func TestCollectionGetIndexes_ShouldReturnIndexesInOrderedByName(t *testing.T) {
},
}
- _, err := f.createCollectionIndexFor(collection.Name(), indexDesc)
+ _, err := f.createCollectionIndexFor(collection.Name().Value(), indexDesc)
require.NoError(t, err)
}
@@ -1032,7 +1050,7 @@ func TestDropIndex_ShouldDeleteIndex(t *testing.T) {
err := f.dropIndex(usersColName, desc.Name)
assert.NoError(t, err)
- indexKey := core.NewCollectionIndexKey(usersColName, desc.Name)
+ indexKey := core.NewCollectionIndexKey(immutable.Some(f.users.ID()), desc.Name)
_, err = f.txn.Systemstore().Get(f.ctx, indexKey.ToDS())
assert.Error(t, err)
}
@@ -1269,5 +1287,5 @@ func TestNewCollectionIndex_IfDescriptionHasNonExistingField_ReturnError(t *test
desc := getUsersIndexDescOnName()
desc.Fields[0].Name = "non_existing_field"
_, err := NewCollectionIndex(f.users, desc)
- require.ErrorIs(t, err, NewErrIndexDescHasNonExistingField(desc, desc.Fields[0].Name))
+ require.ErrorIs(t, err, client.NewErrFieldNotExist(desc.Fields[0].Name))
}
diff --git a/db/indexed_docs_test.go b/db/indexed_docs_test.go
index b7c7abbf9d..68d89bcde8 100644
--- a/db/indexed_docs_test.go
+++ b/db/indexed_docs_test.go
@@ -19,6 +19,7 @@ import (
ipfsDatastore "github.com/ipfs/go-datastore"
"github.com/ipfs/go-datastore/query"
+ "github.com/sourcenetwork/immutable"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
@@ -77,12 +78,12 @@ func (f *indexTestFixture) newProdDoc(id int, price float64, cat string, col cli
// The format of the non-unique index key is: "////"
// Example: "/5/1/12/bae-61cd6879-63ca-5ca9-8731-470a3c1dac69"
type indexKeyBuilder struct {
- f *indexTestFixture
- colName string
- fieldName string
- doc *client.Document
- values [][]byte
- isUnique bool
+ f *indexTestFixture
+ colName string
+ fieldsNames []string
+ doc *client.Document
+ values [][]byte
+ isUnique bool
}
func newIndexKeyBuilder(f *indexTestFixture) *indexKeyBuilder {
@@ -94,11 +95,11 @@ func (b *indexKeyBuilder) Col(colName string) *indexKeyBuilder {
return b
}
-// Field sets the field name for the index key.
+// Fields sets the fields names for the index key.
// If the field name is not set, the index key will contain only collection id.
// When building a key it will it will find the field id to use in the key.
-func (b *indexKeyBuilder) Field(fieldName string) *indexKeyBuilder {
- b.fieldName = fieldName
+func (b *indexKeyBuilder) Fields(fieldsNames ...string) *indexKeyBuilder {
+ b.fieldsNames = fieldsNames
return b
}
@@ -134,7 +135,7 @@ func (b *indexKeyBuilder) Build() core.IndexDataStoreKey {
require.NoError(b.f.t, err)
var collection client.Collection
for _, col := range cols {
- if col.Name() == b.colName {
+ if col.Name().Value() == b.colName {
collection = col
break
}
@@ -144,33 +145,41 @@ func (b *indexKeyBuilder) Build() core.IndexDataStoreKey {
}
key.CollectionID = collection.ID()
- if b.fieldName == "" {
+ if len(b.fieldsNames) == 0 {
return key
}
indexes, err := collection.GetIndexes(b.f.ctx)
require.NoError(b.f.t, err)
+indexLoop:
for _, index := range indexes {
- if index.Fields[0].Name == b.fieldName {
+ if len(index.Fields) == len(b.fieldsNames) {
+ for i := range index.Fields {
+ if index.Fields[i].Name != b.fieldsNames[i] {
+ continue indexLoop
+ }
+ }
key.IndexID = index.ID
- break
+ break indexLoop
}
}
if b.doc != nil {
- var fieldBytesVal []byte
- var fieldValue *client.FieldValue
- var err error
- if len(b.values) == 0 {
- fieldValue, err = b.doc.GetValue(b.fieldName)
+ for i, fieldName := range b.fieldsNames {
+ var fieldBytesVal []byte
+ var fieldValue *client.FieldValue
+ var err error
+ if len(b.values) <= i {
+ fieldValue, err = b.doc.GetValue(fieldName)
+ require.NoError(b.f.t, err)
+ } else {
+ fieldValue = client.NewFieldValue(client.LWW_REGISTER, b.values[i])
+ }
+ fieldBytesVal, err = fieldValue.Bytes()
require.NoError(b.f.t, err)
- } else {
- fieldValue = client.NewFieldValue(client.LWW_REGISTER, b.values[0])
+ key.FieldValues = append(key.FieldValues, fieldBytesVal)
}
- fieldBytesVal, err = fieldValue.Bytes()
- require.NoError(b.f.t, err)
- key.FieldValues = [][]byte{fieldBytesVal}
if !b.isUnique {
key.FieldValues = append(key.FieldValues, []byte(b.doc.ID().String()))
}
@@ -211,12 +220,15 @@ func (*indexTestFixture) resetSystemStoreStubs(systemStoreOn *mocks.DSReaderWrit
}
func (f *indexTestFixture) stubSystemStore(systemStoreOn *mocks.DSReaderWriter_Expecter) {
+ if f.users == nil {
+ f.users = f.addUsersCollection()
+ }
desc := getUsersIndexDescOnName()
desc.ID = 1
indexOnNameDescData, err := json.Marshal(desc)
require.NoError(f.t, err)
- colIndexKey := core.NewCollectionIndexKey(usersColName, "")
+ colIndexKey := core.NewCollectionIndexKey(immutable.Some(f.users.ID()), "")
matchPrefixFunc := func(q query.Query) bool {
return q.Prefix == colIndexKey.ToDS().String()
}
@@ -230,7 +242,7 @@ func (f *indexTestFixture) stubSystemStore(systemStoreOn *mocks.DSReaderWriter_E
systemStoreOn.Query(mock.Anything, mock.Anything).Maybe().
Return(mocks.NewQueryResultsWithValues(f.t), nil)
- colIndexOnNameKey := core.NewCollectionIndexKey(usersColName, testUsersColIndexName)
+ colIndexOnNameKey := core.NewCollectionIndexKey(immutable.Some(f.users.ID()), testUsersColIndexName)
systemStoreOn.Get(mock.Anything, colIndexOnNameKey.ToDS()).Maybe().Return(indexOnNameDescData, nil)
if f.users != nil {
@@ -255,7 +267,7 @@ func TestNonUnique_IfDocIsAdded_ShouldBeIndexed(t *testing.T) {
doc := f.newUserDoc("John", 21, f.users)
f.saveDocToCollection(doc, f.users)
- key := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Doc(doc).Build()
+ key := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Doc(doc).Build()
data, err := f.txn.Datastore().Get(f.ctx, key.ToDS())
require.NoError(t, err)
@@ -268,7 +280,7 @@ func TestNonUnique_IfFailsToStoredIndexedDoc_Error(t *testing.T) {
f.createUserCollectionIndexOnName()
doc := f.newUserDoc("John", 21, f.users)
- key := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Doc(doc).Build()
+ key := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Doc(doc).Build()
mockTxn := f.mockTxn()
@@ -345,7 +357,7 @@ func TestNonUnique_IfIndexIntField_StoreIt(t *testing.T) {
doc := f.newUserDoc("John", 21, f.users)
f.saveDocToCollection(doc, f.users)
- key := newIndexKeyBuilder(f).Col(usersColName).Field(usersAgeFieldName).Doc(doc).Build()
+ key := newIndexKeyBuilder(f).Col(usersColName).Fields(usersAgeFieldName).Doc(doc).Build()
data, err := f.txn.Datastore().Get(f.ctx, key.ToDS())
require.NoError(t, err)
@@ -357,9 +369,9 @@ func TestNonUnique_IfMultipleCollectionsWithIndexes_StoreIndexWithCollectionID(t
users := f.addUsersCollection()
products := f.getProductsCollectionDesc()
- _, err := f.createCollectionIndexFor(users.Name(), getUsersIndexDescOnName())
+ _, err := f.createCollectionIndexFor(users.Name().Value(), getUsersIndexDescOnName())
require.NoError(f.t, err)
- _, err = f.createCollectionIndexFor(products.Name(), getProductsIndexDescOnCategory())
+ _, err = f.createCollectionIndexFor(products.Name().Value(), getProductsIndexDescOnCategory())
require.NoError(f.t, err)
f.commitTxn()
@@ -372,8 +384,8 @@ func TestNonUnique_IfMultipleCollectionsWithIndexes_StoreIndexWithCollectionID(t
require.NoError(f.t, err)
f.commitTxn()
- userDocID := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Doc(userDoc).Build()
- prodDocID := newIndexKeyBuilder(f).Col(productsColName).Field(productsCategoryFieldName).Doc(prodDoc).Build()
+ userDocID := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Doc(userDoc).Build()
+ prodDocID := newIndexKeyBuilder(f).Col(productsColName).Fields(productsCategoryFieldName).Doc(prodDoc).Build()
data, err := f.txn.Datastore().Get(f.ctx, userDocID.ToDS())
require.NoError(t, err)
@@ -392,8 +404,8 @@ func TestNonUnique_IfMultipleIndexes_StoreIndexWithIndexID(t *testing.T) {
doc := f.newUserDoc("John", 21, f.users)
f.saveDocToCollection(doc, f.users)
- nameKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Doc(doc).Build()
- ageKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersAgeFieldName).Doc(doc).Build()
+ nameKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Doc(doc).Build()
+ ageKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersAgeFieldName).Doc(doc).Build()
data, err := f.txn.Datastore().Get(f.ctx, nameKey.ToDS())
require.NoError(t, err)
@@ -505,7 +517,7 @@ func TestNonUnique_IfIndexedFieldIsNil_StoreItAsNil(t *testing.T) {
f.saveDocToCollection(doc, f.users)
- key := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Doc(doc).
+ key := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Doc(doc).
Values([]byte(nil)).Build()
data, err := f.txn.Datastore().Get(f.ctx, key.ToDS())
@@ -524,8 +536,8 @@ func TestNonUniqueCreate_ShouldIndexExistingDocs(t *testing.T) {
f.createUserCollectionIndexOnName()
- key1 := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Doc(doc1).Build()
- key2 := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Doc(doc2).Build()
+ key1 := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Doc(doc1).Build()
+ key2 := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Doc(doc2).Build()
data, err := f.txn.Datastore().Get(f.ctx, key1.ToDS())
require.NoError(t, err, key1.ToString())
@@ -596,7 +608,7 @@ func TestNonUniqueCreate_IfUponIndexingExistingDocsFetcherFails_ReturnError(t *t
f.saveDocToCollection(doc, f.users)
f.users.(*collection).fetcherFactory = tc.PrepareFetcher
- key := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Doc(doc).Build()
+ key := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Doc(doc).Build()
_, err := f.users.CreateIndex(f.ctx, getUsersIndexDescOnName())
require.ErrorIs(t, err, testError, tc.Name)
@@ -633,11 +645,11 @@ func TestNonUniqueCreate_IfDatastoreFailsToStoreIndex_ReturnError(t *testing.T)
func TestNonUniqueDrop_ShouldDeleteStoredIndexedFields(t *testing.T) {
f := newIndexTestFixtureBare(t)
users := f.addUsersCollection()
- _, err := f.createCollectionIndexFor(users.Name(), getUsersIndexDescOnName())
+ _, err := f.createCollectionIndexFor(users.Name().Value(), getUsersIndexDescOnName())
require.NoError(f.t, err)
- _, err = f.createCollectionIndexFor(users.Name(), getUsersIndexDescOnAge())
+ _, err = f.createCollectionIndexFor(users.Name().Value(), getUsersIndexDescOnAge())
require.NoError(f.t, err)
- _, err = f.createCollectionIndexFor(users.Name(), getUsersIndexDescOnWeight())
+ _, err = f.createCollectionIndexFor(users.Name().Value(), getUsersIndexDescOnWeight())
require.NoError(f.t, err)
f.commitTxn()
@@ -645,16 +657,16 @@ func TestNonUniqueDrop_ShouldDeleteStoredIndexedFields(t *testing.T) {
f.saveDocToCollection(f.newUserDoc("Islam", 23, users), users)
products := f.getProductsCollectionDesc()
- _, err = f.createCollectionIndexFor(products.Name(), getProductsIndexDescOnCategory())
+ _, err = f.createCollectionIndexFor(products.Name().Value(), getProductsIndexDescOnCategory())
require.NoError(f.t, err)
f.commitTxn()
f.saveDocToCollection(f.newProdDoc(1, 55, "games", products), products)
- userNameKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Build()
- userAgeKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersAgeFieldName).Build()
- userWeightKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersWeightFieldName).Build()
- prodCatKey := newIndexKeyBuilder(f).Col(productsColName).Field(productsCategoryFieldName).Build()
+ userNameKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Build()
+ userAgeKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersAgeFieldName).Build()
+ userWeightKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersWeightFieldName).Build()
+ prodCatKey := newIndexKeyBuilder(f).Col(productsColName).Fields(productsCategoryFieldName).Build()
err = f.dropIndex(usersColName, testUsersColIndexAge)
require.NoError(f.t, err)
@@ -695,7 +707,7 @@ func TestNonUniqueUpdate_ShouldDeleteOldValueAndStoreNewOne(t *testing.T) {
f.saveDocToCollection(doc, f.users)
for _, tc := range cases {
- oldKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Doc(doc).Build()
+ oldKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Doc(doc).Build()
err := doc.Set(usersNameFieldName, tc.NewValue)
require.NoError(t, err)
@@ -703,7 +715,7 @@ func TestNonUniqueUpdate_ShouldDeleteOldValueAndStoreNewOne(t *testing.T) {
require.NoError(t, err)
f.commitTxn()
- newKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Doc(doc).Build()
+ newKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Doc(doc).Build()
_, err = f.txn.Datastore().Get(f.ctx, oldKey.ToDS())
require.Error(t, err)
@@ -810,14 +822,14 @@ func TestNonUniqueUpdate_IfFetcherFails_ReturnError(t *testing.T) {
f.saveDocToCollection(doc, f.users)
f.users.(*collection).fetcherFactory = tc.PrepareFetcher
- oldKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Doc(doc).Build()
+ oldKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Doc(doc).Build()
err := doc.Set(usersNameFieldName, "Islam")
require.NoError(t, err, tc.Name)
err = f.users.Update(f.ctx, doc)
require.Error(t, err, tc.Name)
- newKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Doc(doc).Build()
+ newKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Doc(doc).Build()
_, err = f.txn.Datastore().Get(f.ctx, oldKey.ToDS())
require.NoError(t, err, tc.Name)
@@ -835,7 +847,7 @@ func TestNonUniqueUpdate_IfFailsToUpdateIndex_ReturnError(t *testing.T) {
f.saveDocToCollection(doc, f.users)
f.commitTxn()
- validKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersAgeFieldName).Doc(doc).Build()
+ validKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersAgeFieldName).Doc(doc).Build()
err := f.txn.Datastore().Delete(f.ctx, validKey.ToDS())
require.NoError(f.t, err)
f.commitTxn()
@@ -956,7 +968,7 @@ func TestNonUpdate_IfIndexedFieldWasNil_ShouldDeleteIt(t *testing.T) {
f.saveDocToCollection(doc, f.users)
- oldKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Doc(doc).
+ oldKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Doc(doc).
Values([]byte(nil)).Build()
err = doc.Set(usersNameFieldName, "John")
@@ -966,7 +978,7 @@ func TestNonUpdate_IfIndexedFieldWasNil_ShouldDeleteIt(t *testing.T) {
require.NoError(f.t, err)
f.commitTxn()
- newKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Doc(doc).Build()
+ newKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Doc(doc).Build()
_, err = f.txn.Datastore().Get(f.ctx, newKey.ToDS())
require.NoError(t, err)
@@ -1017,8 +1029,8 @@ func TestUniqueCreate_ShouldIndexExistingDocs(t *testing.T) {
f.createUserCollectionUniqueIndexOnName()
- key1 := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Unique().Doc(doc1).Build()
- key2 := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Unique().Doc(doc2).Build()
+ key1 := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Unique().Doc(doc1).Build()
+ key2 := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Unique().Doc(doc2).Build()
data, err := f.txn.Datastore().Get(f.ctx, key1.ToDS())
require.NoError(t, err, key1.ToString())
@@ -1043,7 +1055,7 @@ func TestUnique_IfIndexedFieldIsNil_StoreItAsNil(t *testing.T) {
f.saveDocToCollection(doc, f.users)
- key := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Unique().Doc(doc).
+ key := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Unique().Doc(doc).
Values([]byte(nil)).Build()
data, err := f.txn.Datastore().Get(f.ctx, key.ToDS())
@@ -1054,17 +1066,17 @@ func TestUnique_IfIndexedFieldIsNil_StoreItAsNil(t *testing.T) {
func TestUniqueDrop_ShouldDeleteStoredIndexedFields(t *testing.T) {
f := newIndexTestFixtureBare(t)
users := f.addUsersCollection()
- _, err := f.createCollectionIndexFor(users.Name(), makeUnique(getUsersIndexDescOnName()))
+ _, err := f.createCollectionIndexFor(users.Name().Value(), makeUnique(getUsersIndexDescOnName()))
require.NoError(f.t, err)
- _, err = f.createCollectionIndexFor(users.Name(), makeUnique(getUsersIndexDescOnAge()))
+ _, err = f.createCollectionIndexFor(users.Name().Value(), makeUnique(getUsersIndexDescOnAge()))
require.NoError(f.t, err)
f.commitTxn()
f.saveDocToCollection(f.newUserDoc("John", 21, users), users)
f.saveDocToCollection(f.newUserDoc("Islam", 23, users), users)
- userNameKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Build()
- userAgeKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersAgeFieldName).Build()
+ userNameKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Build()
+ userAgeKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersAgeFieldName).Build()
err = f.dropIndex(usersColName, testUsersColIndexAge)
require.NoError(f.t, err)
@@ -1103,7 +1115,7 @@ func TestUniqueUpdate_ShouldDeleteOldValueAndStoreNewOne(t *testing.T) {
f.saveDocToCollection(doc, f.users)
for _, tc := range cases {
- oldKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Unique().Doc(doc).Build()
+ oldKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Unique().Doc(doc).Build()
err := doc.Set(usersNameFieldName, tc.NewValue)
require.NoError(t, err)
@@ -1111,11 +1123,146 @@ func TestUniqueUpdate_ShouldDeleteOldValueAndStoreNewOne(t *testing.T) {
require.NoError(t, err)
f.commitTxn()
- newKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Unique().Doc(doc).Build()
+ newKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Unique().Doc(doc).Build()
+
+ _, err = f.txn.Datastore().Get(f.ctx, oldKey.ToDS())
+ require.Error(t, err)
+ _, err = f.txn.Datastore().Get(f.ctx, newKey.ToDS())
+ require.NoError(t, err)
+ }
+}
+
+func TestCompositeCreate_ShouldIndexExistingDocs(t *testing.T) {
+ f := newIndexTestFixture(t)
+ defer f.db.Close()
+
+ doc1 := f.newUserDoc("John", 21, f.users)
+ f.saveDocToCollection(doc1, f.users)
+ doc2 := f.newUserDoc("Islam", 18, f.users)
+ f.saveDocToCollection(doc2, f.users)
+
+ f.createUserCollectionIndexOnNameAndAge()
+
+ key1 := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName, usersAgeFieldName).Doc(doc1).Build()
+ key2 := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName, usersAgeFieldName).Doc(doc2).Build()
+
+ ds := f.txn.Datastore()
+ data, err := ds.Get(f.ctx, key1.ToDS())
+ require.NoError(t, err, key1.ToString())
+ assert.Len(t, data, 0)
+ data, err = f.txn.Datastore().Get(f.ctx, key2.ToDS())
+ require.NoError(t, err)
+ assert.Len(t, data, 0)
+}
+
+func TestComposite_IfIndexedFieldIsNil_StoreItAsNil(t *testing.T) {
+ f := newIndexTestFixture(t)
+ defer f.db.Close()
+ f.createUserCollectionIndexOnNameAndAge()
+
+ docJSON, err := json.Marshal(struct {
+ Age int `json:"age"`
+ }{Age: 44})
+ require.NoError(f.t, err)
+
+ doc, err := client.NewDocFromJSON(docJSON, f.users.Schema())
+ require.NoError(f.t, err)
+
+ f.saveDocToCollection(doc, f.users)
+
+ key := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName, usersAgeFieldName).Doc(doc).
+ Values([]byte(nil)).Build()
+
+ data, err := f.txn.Datastore().Get(f.ctx, key.ToDS())
+ require.NoError(t, err)
+ assert.Len(t, data, 0)
+}
+
+func TestCompositeDrop_ShouldDeleteStoredIndexedFields(t *testing.T) {
+ f := newIndexTestFixtureBare(t)
+ users := f.addUsersCollection()
+ _, err := f.createCollectionIndexFor(users.Name().Value(), addFieldToIndex(getUsersIndexDescOnName(), usersAgeFieldName))
+ require.NoError(f.t, err)
+ _, err = f.createCollectionIndexFor(users.Name().Value(), addFieldToIndex(getUsersIndexDescOnAge(), usersWeightFieldName))
+ require.NoError(f.t, err)
+ f.commitTxn()
+
+ f.saveDocToCollection(f.newUserDoc("John", 21, users), users)
+ f.saveDocToCollection(f.newUserDoc("Islam", 23, users), users)
+
+ userNameAgeKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName, usersAgeFieldName).Build()
+ userAgeWeightKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersAgeFieldName, usersWeightFieldName).Build()
+
+ err = f.dropIndex(usersColName, testUsersColIndexAge)
+ require.NoError(f.t, err)
+
+ assert.Len(t, f.getPrefixFromDataStore(userNameAgeKey.ToString()), 2)
+ assert.Len(t, f.getPrefixFromDataStore(userAgeWeightKey.ToString()), 0)
+}
+
+func TestCompositeUpdate_ShouldDeleteOldValueAndStoreNewOne(t *testing.T) {
+ f := newIndexTestFixture(t)
+ defer f.db.Close()
+ f.createUserCollectionIndexOnNameAndAge()
+
+ cases := []struct {
+ Name string
+ Field string
+ NewValue any
+ Exec func(doc *client.Document) error
+ }{
+ {
+ Name: "update first",
+ NewValue: "Islam",
+ Field: usersNameFieldName,
+ Exec: func(doc *client.Document) error {
+ return f.users.Update(f.ctx, doc)
+ },
+ },
+ {
+ Name: "save first",
+ NewValue: "Andy",
+ Field: usersNameFieldName,
+ Exec: func(doc *client.Document) error {
+ return f.users.Save(f.ctx, doc)
+ },
+ },
+ {
+ Name: "update second",
+ NewValue: 33,
+ Field: usersAgeFieldName,
+ Exec: func(doc *client.Document) error {
+ return f.users.Update(f.ctx, doc)
+ },
+ },
+ {
+ Name: "save second",
+ NewValue: 36,
+ Field: usersAgeFieldName,
+ Exec: func(doc *client.Document) error {
+ return f.users.Save(f.ctx, doc)
+ },
+ },
+ }
+
+ doc := f.newUserDoc("John", 21, f.users)
+ f.saveDocToCollection(doc, f.users)
+
+ for _, tc := range cases {
+ oldKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName, usersAgeFieldName).Doc(doc).Build()
+
+ err := doc.Set(tc.Field, tc.NewValue)
+ require.NoError(t, err)
+ err = tc.Exec(doc)
+ require.NoError(t, err)
+ f.commitTxn()
+
+ newKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName, usersAgeFieldName).Doc(doc).Build()
_, err = f.txn.Datastore().Get(f.ctx, oldKey.ToDS())
require.Error(t, err)
_, err = f.txn.Datastore().Get(f.ctx, newKey.ToDS())
require.NoError(t, err)
+ f.commitTxn()
}
}
diff --git a/db/schema.go b/db/schema.go
index 988aea5e17..5aaac38b1c 100644
--- a/db/schema.go
+++ b/db/schema.go
@@ -39,16 +39,6 @@ func (db *db) addSchema(
txn datastore.Txn,
schemaString string,
) ([]client.CollectionDescription, error) {
- existingCollections, err := db.getAllCollections(ctx, txn)
- if err != nil {
- return nil, err
- }
-
- existingDefinitions := make([]client.CollectionDefinition, len(existingCollections))
- for i := range existingCollections {
- existingDefinitions[i] = existingCollections[i].Definition()
- }
-
newDefinitions, err := db.parser.ParseSDL(ctx, schemaString)
if err != nil {
return nil, err
diff --git a/db/view.go b/db/view.go
index 2b4666df22..2a61ff63af 100644
--- a/db/view.go
+++ b/db/view.go
@@ -57,12 +57,13 @@ func (db *db) addView(
}
for i := range newDefinitions {
- newDefinitions[i].Description.BaseQuery = baseQuery
+ source := client.QuerySource{Query: *baseQuery}
+ newDefinitions[i].Description.Sources = append(newDefinitions[i].Description.Sources, &source)
}
returnDescriptions := make([]client.CollectionDefinition, len(newDefinitions))
for i, definition := range newDefinitions {
- if definition.Description.Name == "" {
+ if !definition.Description.Name.HasValue() {
schema, err := description.CreateSchemaVersion(ctx, txn, definition.Schema)
if err != nil {
return nil, err
diff --git a/docs/data_format_changes/i2198-sec-index-key-change.md b/docs/data_format_changes/i2198-sec-index-key-change.md
new file mode 100644
index 0000000000..8e372aa6ac
--- /dev/null
+++ b/docs/data_format_changes/i2198-sec-index-key-change.md
@@ -0,0 +1,3 @@
+# Key secondary indexes by collection ID
+
+Secondary indexes are now indexed by collection ID instead of collection name.
\ No newline at end of file
diff --git a/go.mod b/go.mod
index f9de929232..1fa9bfdbf6 100644
--- a/go.mod
+++ b/go.mod
@@ -1,13 +1,13 @@
module github.com/sourcenetwork/defradb
-go 1.20
+go 1.21
require (
github.com/bits-and-blooms/bitset v1.13.0
github.com/bxcodec/faker v2.0.1+incompatible
- github.com/evanphx/json-patch/v5 v5.7.0
+ github.com/evanphx/json-patch/v5 v5.8.1
github.com/fxamacker/cbor/v2 v2.5.0
- github.com/getkin/kin-openapi v0.122.0
+ github.com/getkin/kin-openapi v0.123.0
github.com/go-chi/chi/v5 v5.0.11
github.com/go-chi/cors v1.2.1
github.com/go-errors/errors v1.5.1
@@ -43,8 +43,8 @@ require (
github.com/ugorji/go/codec v1.2.12
github.com/valyala/fastjson v1.6.4
github.com/vito/go-sse v1.0.0
- go.opentelemetry.io/otel/metric v1.21.0
- go.opentelemetry.io/otel/sdk/metric v1.21.0
+ go.opentelemetry.io/otel/metric v1.22.0
+ go.opentelemetry.io/otel/sdk/metric v1.22.0
go.uber.org/zap v1.26.0
golang.org/x/crypto v0.18.0
golang.org/x/exp v0.0.0-20240103183307-be819d1f06fc
@@ -75,8 +75,8 @@ require (
github.com/fsnotify/fsnotify v1.7.0 // indirect
github.com/go-logr/logr v1.4.1 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
- github.com/go-openapi/jsonpointer v0.19.6 // indirect
- github.com/go-openapi/swag v0.22.4 // indirect
+ github.com/go-openapi/jsonpointer v0.20.2 // indirect
+ github.com/go-openapi/swag v0.22.8 // indirect
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 // indirect
github.com/godbus/dbus/v5 v5.1.0 // indirect
github.com/gogo/protobuf v1.3.2 // indirect
@@ -175,9 +175,9 @@ require (
github.com/whyrusleeping/go-keyspace v0.0.0-20160322163242-5b898ac5add1 // indirect
github.com/x448/float16 v0.8.4 // indirect
go.opencensus.io v0.24.0 // indirect
- go.opentelemetry.io/otel v1.21.0 // indirect
- go.opentelemetry.io/otel/sdk v1.21.0 // indirect
- go.opentelemetry.io/otel/trace v1.21.0 // indirect
+ go.opentelemetry.io/otel v1.22.0 // indirect
+ go.opentelemetry.io/otel/sdk v1.22.0 // indirect
+ go.opentelemetry.io/otel/trace v1.22.0 // indirect
go.uber.org/dig v1.17.1 // indirect
go.uber.org/fx v1.20.1 // indirect
go.uber.org/mock v0.4.0 // indirect
diff --git a/go.sum b/go.sum
index f70ba2aaaf..572e174acd 100644
--- a/go.sum
+++ b/go.sum
@@ -8,11 +8,14 @@ dmitri.shuralyov.com/service/change v0.0.0-20181023043359-a85b471d5412/go.mod h1
dmitri.shuralyov.com/state v0.0.0-20180228185332-28bcc343414c/go.mod h1:0PRwlb0D6DFvNNtx+9ybjezNCa8XF0xaYcETyp6rHWU=
git.apache.org/thrift.git v0.0.0-20180902110319-2566ecd5d999/go.mod h1:fPE2ZNJGynbRyZ4dJvy6G277gSllfV2HJqblrnkyeyg=
github.com/AndreasBriese/bbloom v0.0.0-20190825152654-46b345b51c96 h1:cTp8I5+VIoKjsnZuH8vjyaysT/ses3EvZeaV/1UkF2M=
+github.com/AndreasBriese/bbloom v0.0.0-20190825152654-46b345b51c96/go.mod h1:bOvUY6CB00SOBii9/FifXqc0awNKxLFCL/+pkDPuyl8=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/DataDog/zstd v1.4.1 h1:3oxKN3wbHibqx897utPC2LTQU4J+IHWWJO+glkAkpFM=
+github.com/DataDog/zstd v1.4.1/go.mod h1:1jcaCB/ufaK+sKp1NBhlGmpz41jOoPQ35bpF36t7BBo=
github.com/Jorropo/jsync v1.0.1 h1:6HgRolFZnsdfzRUj+ImB9og1JYOxQoReSywkHOGSaUU=
github.com/Jorropo/jsync v1.0.1/go.mod h1:jCOZj3vrBCri3bSU3ErUYvevKlnbssrXeCivybS5ABQ=
github.com/alecthomas/units v0.0.0-20231202071711-9a357b53e9c9 h1:ez/4by2iGztzR4L0zgAOR8lTQK9VlyBVVd7G4omaOQs=
+github.com/alecthomas/units v0.0.0-20231202071711-9a357b53e9c9/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE=
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=
github.com/benbjohnson/clock v1.3.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=
@@ -31,6 +34,7 @@ github.com/bytecodealliance/wasmtime-go/v15 v15.0.0 h1:4R2MpSPPbtSxqdsOTvsMn1pnw
github.com/bytecodealliance/wasmtime-go/v15 v15.0.0/go.mod h1:m6vB/SsM+pnJkVHmO1wzHYUeYtciltTKuxuvkR8pYcY=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
+github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44=
github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
@@ -49,7 +53,7 @@ github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsr
github.com/cpuguy83/go-md2man/v2 v2.0.3 h1:qMCsGGgs+MAzDFyp9LpAe1Lqy/fY/qCovCm0qnXZOBM=
github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/crackcomm/go-gitignore v0.0.0-20231225121904-e25f5bc08668 h1:ZFUue+PNxmHlu7pYv+IYMtqlaO/0VwaGEqKepZf9JpA=
-github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
+github.com/crackcomm/go-gitignore v0.0.0-20231225121904-e25f5bc08668/go.mod h1:p1d6YEZWvFzEh4KLyvBcVSnrfNDDvK2zfK/4x2v/4pE=
github.com/cskr/pubsub v1.0.2 h1:vlOzMhl6PFn60gRlTQQsIfVwaPB/B/8MziK8FhEPt/0=
github.com/cskr/pubsub v1.0.2/go.mod h1:/8MzYXk/NJAz782G8RPkFzXTZVu63VotefPnR9TIRis=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@@ -59,10 +63,13 @@ github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8Yc
github.com/davidlazar/go-crypto v0.0.0-20200604182044-b73af7476f6c h1:pFUpOrbxDR6AkioZ1ySsx5yxlDQZ8stG2b88gTPxgJU=
github.com/davidlazar/go-crypto v0.0.0-20200604182044-b73af7476f6c/go.mod h1:6UhI8N9EjYm1c2odKpFpAYeR8dsBeM7PtzQhRgxRr9U=
github.com/decred/dcrd/crypto/blake256 v1.0.1 h1:7PltbUIQB7u/FfZ39+DGa/ShuMyJ5ilcvdfma9wOH6Y=
+github.com/decred/dcrd/crypto/blake256 v1.0.1/go.mod h1:2OfgNZ5wDpcsFmHmCK5gZTPcCXqlm2ArzUIkw9czNJo=
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.2.0 h1:8UrgZ3GkP4i/CLijOJx79Yu+etlyjdBU4sfcs2WYQMs=
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.2.0/go.mod h1:v57UDF4pDQJcEfFUCRop3lJL149eHGSe9Jvczhzjo/0=
github.com/dgraph-io/badger v1.6.2 h1:mNw0qs90GVgGGWylh0umH5iag1j6n/PeJtNvL6KY/x8=
+github.com/dgraph-io/badger v1.6.2/go.mod h1:JW2yswe3V058sS0kZ2h/AXeDSqFjxnZcRrVH//y2UQE=
github.com/dgraph-io/badger/v3 v3.2011.1 h1:Hmyof0WMEF/QtutX5SQHzIMnJQxb/IrSzhjckV2SD6g=
+github.com/dgraph-io/badger/v3 v3.2011.1/go.mod h1:0rLLrQpKVQAL0or/lBLMQznhr6dWWX7h5AKnmnqx268=
github.com/dgraph-io/ristretto v0.1.1 h1:6CWw5tJNgpegArSHpNHJKldNeq03FQCwYvfMVWajOK8=
github.com/dgraph-io/ristretto v0.1.1/go.mod h1:S1GPSBCYCIhmVNfcth17y2zZtQT6wzkzgwUve0VDWWA=
github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2 h1:tdlZCpZ/P9DhczCTSixgIKmwPv6+wP5DGjqLYw5SUiA=
@@ -80,22 +87,23 @@ github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymF
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
-github.com/evanphx/json-patch/v5 v5.7.0 h1:nJqP7uwL84RJInrohHfW0Fx3awjbm8qZeFv0nW9SYGc=
-github.com/evanphx/json-patch/v5 v5.7.0/go.mod h1:VNkHZ/282BpEyt/tObQO8s5CMPmYYq14uClGH4abBuQ=
+github.com/evanphx/json-patch/v5 v5.8.1 h1:iPEdwg0XayoS+E7Mth9JxwUtOgyVxnDTXHtKhZPlZxA=
+github.com/evanphx/json-patch/v5 v5.8.1/go.mod h1:VNkHZ/282BpEyt/tObQO8s5CMPmYYq14uClGH4abBuQ=
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc=
github.com/flynn/noise v1.0.1 h1:vPp/jdQLXC6ppsXSj/pM3W1BIJ5FEHE2TulSJBpb43Y=
github.com/flynn/noise v1.0.1/go.mod h1:xbMo+0i6+IGbYdJhF31t2eR1BIU0CYc12+BNAKwUTag=
github.com/francoispqt/gojay v1.2.13 h1:d2m3sFjloqoIUQU3TsHBgj6qg/BVGlTBeHDUmyJnXKk=
github.com/francoispqt/gojay v1.2.13/go.mod h1:ehT5mTG4ua4581f1++1WLG0vPdaA9HaiDsoyrBGkyDY=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
+github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA=
github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM=
github.com/fxamacker/cbor/v2 v2.5.0 h1:oHsG0V/Q6E/wqTS2O1Cozzsy69nqCiguo5Q1a1ADivE=
github.com/fxamacker/cbor/v2 v2.5.0/go.mod h1:TA1xS00nchWmaBnEIxPSE5oHLuJBAVvqrtAnWBwBCVo=
-github.com/getkin/kin-openapi v0.122.0 h1:WB9Jbl0Hp/T79/JF9xlSW5Kl9uYdk/AWD0yAd9HOM10=
-github.com/getkin/kin-openapi v0.122.0/go.mod h1:PCWw/lfBrJY4HcdqE3jj+QFkaFK8ABoqo7PvqVhXXqw=
+github.com/getkin/kin-openapi v0.123.0 h1:zIik0mRwFNLyvtXK274Q6ut+dPh6nlxBp0x7mNrPhs8=
+github.com/getkin/kin-openapi v0.123.0/go.mod h1:wb1aSZA/iWmorQP9KTAS/phLj/t17B5jT7+fS8ed9NM=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/gliderlabs/ssh v0.1.1/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0=
github.com/go-chi/chi/v5 v5.0.11 h1:BnpYbFZ3T3S1WMpD79r7R5ThWX40TaFB7L31Y8xqSwA=
@@ -110,15 +118,15 @@ github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
-github.com/go-openapi/jsonpointer v0.19.6 h1:eCs3fxoIi3Wh6vtgmLTOjdhSpiqphQ+DaPn38N2ZdrE=
-github.com/go-openapi/jsonpointer v0.19.6/go.mod h1:osyAmYz/mB/C3I+WsTTSgw1ONzaLJoLCyoi6/zppojs=
-github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14=
-github.com/go-openapi/swag v0.22.4 h1:QLMzNJnMGPRNDCbySlcj1x01tzU8/9LTTL9hZZZogBU=
-github.com/go-openapi/swag v0.22.4/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14=
+github.com/go-openapi/jsonpointer v0.20.2 h1:mQc3nmndL8ZBzStEo3JYF8wzmeWffDH4VbXz58sAx6Q=
+github.com/go-openapi/jsonpointer v0.20.2/go.mod h1:bHen+N0u1KEO3YlmqOjTT9Adn1RfD91Ar825/PuiRVs=
+github.com/go-openapi/swag v0.22.8 h1:/9RjDSQ0vbFR+NyjGMkFTsA1IA0fmhKSThmfGZjicbw=
+github.com/go-openapi/swag v0.22.8/go.mod h1:6QT22icPLEqAM/z/TChgb4WAveCHF92+2gF0CNjHpPI=
github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE=
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI=
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls=
github.com/go-test/deep v1.0.8 h1:TDsG77qcSprGbC6vTN8OuXp5g+J+b5Pcguhf7Zt61VM=
+github.com/go-test/deep v1.0.8/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE=
github.com/go-yaml/yaml v2.1.0+incompatible/go.mod h1:w2MrLa16VYP0jy6N7M5kHaCkaLENm+P+Tv+MfurjSw0=
github.com/godbus/dbus/v5 v5.0.3/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
@@ -167,6 +175,7 @@ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
+github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ=
github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
github.com/google/gopacket v1.1.19 h1:ves8RnFZPGiFnTS0uPQStjwru6uO6h+nlr9j6fL7kF8=
@@ -183,6 +192,7 @@ github.com/googleapis/gax-go v2.0.0+incompatible/go.mod h1:SFVmujtThgffbyetf+mdk
github.com/googleapis/gax-go/v2 v2.0.3/go.mod h1:LLvjysVCY1JZeum8Z6l8qUty8fiNwE08qbEPm1M08qg=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gopherjs/gopherjs v0.0.0-20190812055157-5d271430af9f h1:KMlcu9X58lhTA/KrfX8Bi1LQSO4pzoVjTiL3h4Jk+Zk=
+github.com/gopherjs/gopherjs v0.0.0-20190812055157-5d271430af9f/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA=
@@ -202,6 +212,7 @@ github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/hsanjuan/ipfs-lite v1.8.1 h1:Rpd9bTXYgkmnt8M5QsZnWwtW6ebxAB7HlU/d0zE4BmA=
+github.com/hsanjuan/ipfs-lite v1.8.1/go.mod h1:oGCaHBi+I73UFjc6wPAQ75hr4FjJhoqy6YPZjtghDIc=
github.com/huin/goupnp v1.3.0 h1:UvLUlWDNpoUdYzb2TCn+MuTWtcjXKSza2n6CBdQ0xXc=
github.com/huin/goupnp v1.3.0/go.mod h1:gnGPsThkYa7bFi/KWmEysQRf48l2dvR5bxr2OFckNX8=
github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI=
@@ -215,18 +226,23 @@ github.com/ipfs/bbloom v0.0.4/go.mod h1:cS9YprKXpoZ9lT0n/Mw/a6/aFV6DTjTLYHeA+gyq
github.com/ipfs/boxo v0.17.0 h1:fVXAb12dNbraCX1Cdid5BB6Kl62gVLNVA+e0EYMqAU0=
github.com/ipfs/boxo v0.17.0/go.mod h1:pIZgTWdm3k3pLF9Uq6MB8JEcW07UDwNJjlXW1HELW80=
github.com/ipfs/go-bitfield v1.1.0 h1:fh7FIo8bSwaJEh6DdTWbCeZ1eqOaOkKFI74SCnsWbGA=
+github.com/ipfs/go-bitfield v1.1.0/go.mod h1:paqf1wjq/D2BBmzfTVFlJQ9IlFOZpg422HL0HqsGWHU=
github.com/ipfs/go-block-format v0.2.0 h1:ZqrkxBA2ICbDRbK8KJs/u0O3dlp6gmAuuXUJNiW1Ycs=
github.com/ipfs/go-block-format v0.2.0/go.mod h1:+jpL11nFx5A/SPpsoBn6Bzkra/zaArfSmsknbPMYgzM=
github.com/ipfs/go-cid v0.4.1 h1:A/T3qGvxi4kpKWWcPC/PgbvDA2bjVLO7n4UeVwnbs/s=
github.com/ipfs/go-cid v0.4.1/go.mod h1:uQHwDeX4c6CtyrFwdqyhpNcxVewur1M7l7fNU7LKwZk=
github.com/ipfs/go-cidutil v0.1.0 h1:RW5hO7Vcf16dplUU60Hs0AKDkQAVPVplr7lk97CFL+Q=
+github.com/ipfs/go-cidutil v0.1.0/go.mod h1:e7OEVBMIv9JaOxt9zaGEmAoSlXW9jdFZ5lP/0PwcfpA=
github.com/ipfs/go-datastore v0.6.0 h1:JKyz+Gvz1QEZw0LsX1IBn+JFCJQH4SJVFtM4uWU0Myk=
github.com/ipfs/go-datastore v0.6.0/go.mod h1:rt5M3nNbSO/8q1t4LNkLyUwRs8HupMeN/8O4Vn9YAT8=
github.com/ipfs/go-detect-race v0.0.1 h1:qX/xay2W3E4Q1U7d9lNs1sU9nvguX0a7319XbyQ6cOk=
github.com/ipfs/go-detect-race v0.0.1/go.mod h1:8BNT7shDZPo99Q74BpGMK+4D8Mn4j46UU0LZ723meps=
github.com/ipfs/go-ds-badger v0.3.0 h1:xREL3V0EH9S219kFFueOYJJTcjgNSZ2HY1iSvN7U1Ro=
+github.com/ipfs/go-ds-badger v0.3.0/go.mod h1:1ke6mXNqeV8K3y5Ak2bAA0osoTfmxUdupVCGm4QUIek=
github.com/ipfs/go-ds-leveldb v0.5.0 h1:s++MEBbD3ZKc9/8/njrn4flZLnCuY9I79v94gBUNumo=
+github.com/ipfs/go-ds-leveldb v0.5.0/go.mod h1:d3XG9RUDzQ6V4SHi8+Xgj9j1XuEk1z82lquxrVbml/Q=
github.com/ipfs/go-ipfs-blocksutil v0.0.1 h1:Eh/H4pc1hsvhzsQoMEP3Bke/aW5P5rVM1IWFJMcGIPQ=
+github.com/ipfs/go-ipfs-blocksutil v0.0.1/go.mod h1:Yq4M86uIOmxmGPUHv/uI7uKqZNtLb449gwKqXjIsnRk=
github.com/ipfs/go-ipfs-delay v0.0.1 h1:r/UXYyRcddO6thwOnhiznIAiSvxMECGgtv35Xs1IeRQ=
github.com/ipfs/go-ipfs-delay v0.0.1/go.mod h1:8SP1YXK1M1kXuc4KJZINY3TQQ03J2rwBG9QfXmbRPrw=
github.com/ipfs/go-ipfs-pq v0.0.3 h1:YpoHVJB+jzK15mr/xsWC574tyDLkezVrDNeaalQBsTE=
@@ -278,6 +294,7 @@ github.com/koron/go-ssdp v0.0.4/go.mod h1:oDXq+E5IL5q0U8uSBcoAXzTzInwy5lEgC91HoK
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
+github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/pty v1.1.3/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
@@ -308,6 +325,7 @@ github.com/libp2p/go-libp2p-record v0.2.0/go.mod h1:I+3zMkvvg5m2OcSdoL0KPljyJyvN
github.com/libp2p/go-libp2p-routing-helpers v0.7.3 h1:u1LGzAMVRK9Nqq5aYDVOiq/HaB93U9WWczBzGyAC5ZY=
github.com/libp2p/go-libp2p-routing-helpers v0.7.3/go.mod h1:cN4mJAD/7zfPKXBcs9ze31JGYAZgzdABEm+q/hkswb8=
github.com/libp2p/go-libp2p-testing v0.12.0 h1:EPvBb4kKMWO29qP4mZGyhVzUyR25dvfUIK5WDu6iPUA=
+github.com/libp2p/go-libp2p-testing v0.12.0/go.mod h1:KcGDRXyN7sQCllucn1cOOS+Dmm7ujhfEyXQL5lvkcPg=
github.com/libp2p/go-msgio v0.3.0 h1:mf3Z8B1xcFN314sWX+2vOTShIE0Mmn2TXn3YCUQGNj0=
github.com/libp2p/go-msgio v0.3.0/go.mod h1:nyRM819GmVaF9LX3l03RMh10QdOroF++NBbxAb0mmDM=
github.com/libp2p/go-nat v0.2.0 h1:Tyz+bUFAYqGyJ/ppPPymMGbIgNRH+WqC5QrT5fKrrGk=
@@ -319,6 +337,7 @@ github.com/libp2p/go-reuseport v0.4.0/go.mod h1:ZtI03j/wO5hZVDFo2jKywN6bYKWLOy8S
github.com/libp2p/go-yamux/v4 v4.0.1 h1:FfDR4S1wj6Bw2Pqbc8Uz7pCxeRBPbwsBbEdfwiCypkQ=
github.com/libp2p/go-yamux/v4 v4.0.1/go.mod h1:NWjl8ZTLOGlozrXSOZ/HlfG++39iKNnM5wwmtQP1YB4=
github.com/libp2p/zeroconf/v2 v2.2.0 h1:Cup06Jv6u81HLhIj1KasuNM/RHHrJ8T7wOTS4+Tv53Q=
+github.com/libp2p/zeroconf/v2 v2.2.0/go.mod h1:fuJqLnUwZTshS3U/bMRJ3+ow/v9oid1n0DmyYyNO1Xs=
github.com/lunixbochs/vtclean v1.0.0/go.mod h1:pHhQNgMf3btfWnGBVipUOjRYhoOsdGqdm/+2c2E2WMI=
github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
@@ -348,6 +367,7 @@ github.com/minio/sha256-simd v0.1.1-0.20190913151208-6de447530771/go.mod h1:B5e1
github.com/minio/sha256-simd v1.0.1 h1:6kaan5IFmwTNynnKKpDHe6FWHohJOHhCPchzK49dzMM=
github.com/minio/sha256-simd v1.0.1/go.mod h1:Pz6AKMiUdngCLpeTL/RJY1M9rUuPMYujV5xJjtbRSN8=
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
+github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
@@ -398,6 +418,7 @@ github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1Cpa
github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo=
github.com/onsi/gomega v1.29.0 h1:KIA/t2t5UBzoirT4H9tsML45GEbo3ouUnBHsCfD2tVg=
+github.com/onsi/gomega v1.29.0/go.mod h1:9sxs+SwGrKI0+PWe4Fxa9tFQQBG5xSsSbMXOI8PPpoQ=
github.com/opencontainers/runtime-spec v1.0.2/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=
github.com/opencontainers/runtime-spec v1.1.0 h1:HHUyrt9mwHUjtasSbXSMvs4cyFxh+Bll4AjJ9odEGpg=
github.com/opencontainers/runtime-spec v1.1.0/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=
@@ -442,7 +463,8 @@ github.com/quic-go/webtransport-go v0.6.0/go.mod h1:9KjU4AEBqEQidGHNDkZrb8CAa1ab
github.com/raulk/go-watchdog v1.3.0 h1:oUmdlHxdkXRJlwfG0O9omj8ukerm8MEQavSiDTEtBsk=
github.com/raulk/go-watchdog v1.3.0/go.mod h1:fIvOnLbF0b0ZwkB9YU4mOW9Did//4vPZtDqv66NfsMU=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
-github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ=
+github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8=
+github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
@@ -452,6 +474,7 @@ github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgY
github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE=
github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ=
github.com/samber/lo v1.39.0 h1:4gTz1wUhNYLhFSKl6O+8peW0v2F4BCY034GRpU9WnuA=
+github.com/samber/lo v1.39.0/go.mod h1:+m/ZKRl6ClXCE2Lgf3MsQlWfh4bn1bz6CXEOxnEXnEA=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/shurcooL/component v0.0.0-20170202220835-f88ec8f54cc4/go.mod h1:XhFIlyj5a1fBNx5aJTbKoIq0mNaPvOagO+HjB3EtxrY=
github.com/shurcooL/events v0.0.0-20181021180414-410e4ca65f48/go.mod h1:5u70Mqkb5O5cxEA8nxTsgrgLehJeAw6Oc4Ab1c/P1HM=
@@ -526,7 +549,9 @@ github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 h1:epCh84lMvA70
github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7/go.mod h1:q4W45IWZaF22tdD+VEXcAWRA037jwmWEB5VWYORlTpc=
github.com/tarm/serial v0.0.0-20180830185346-98f6abe2eb07/go.mod h1:kDXzergiv9cbyO7IOYJZWg1U88JhDg3PB6klq9Hg2pA=
github.com/textileio/go-datastore-extensions v1.0.1 h1:qIJGqJaigQ1wD4TdwS/hf73u0HChhXvvUSJuxBEKS+c=
+github.com/textileio/go-datastore-extensions v1.0.1/go.mod h1:Pzj9FDRkb55910dr/FX8M7WywvnS26gBgEDez1ZBuLE=
github.com/textileio/go-ds-badger3 v0.1.0 h1:q0kBuBmAcRUR3ClMSYlyw0224XeuzjjGinU53Qz1uXI=
+github.com/textileio/go-ds-badger3 v0.1.0/go.mod h1:z8LuXcihtZ91spEaqhEiNGIWx3E59iFq1HZj4gwwGrU=
github.com/textileio/go-log/v2 v2.1.3-gke-2 h1:YkMA5ua0Cf/X6CkbexInsoJ/HdaHQBlgiv9Yy9hddNM=
github.com/textileio/go-log/v2 v2.1.3-gke-2/go.mod h1:DwACkjFS3kjZZR/4Spx3aPfSsciyslwUe5bxV8CEU2w=
github.com/tidwall/btree v1.7.0 h1:L1fkJH/AuEh5zBnnBbmTwQ5Lt+bRJ5A8EWecslvo9iI=
@@ -542,10 +567,13 @@ github.com/viant/toolbox v0.24.0/go.mod h1:OxMCG57V0PXuIP2HNQrtJf2CjqdmbrOx5EkMI
github.com/vito/go-sse v1.0.0 h1:e6/iTrrvy8BRrOwJwmQmlndlil+TLdxXvHi55ZDzH6M=
github.com/vito/go-sse v1.0.0/go.mod h1:2wkcaQ+jtlZ94Uve8gYZjFpL68luAjssTINA2hpgcZs=
github.com/warpfork/go-testmark v0.12.1 h1:rMgCpJfwy1sJ50x0M0NgyphxYYPMOODIJHhsXyEHU0s=
+github.com/warpfork/go-testmark v0.12.1/go.mod h1:kHwy7wfvGSPh1rQJYKayD4AbtNaeyZdcGi9tNJTaa5Y=
github.com/warpfork/go-wish v0.0.0-20220906213052-39a1cc7a02d0 h1:GDDkbFiaK8jsSDJfjId/PEGEShv6ugrt4kYsC5UIDaQ=
github.com/warpfork/go-wish v0.0.0-20220906213052-39a1cc7a02d0/go.mod h1:x6AKhvSSexNrVSrViXSHUEbICjmGXhtgABaHIySUSGw=
github.com/whyrusleeping/base32 v0.0.0-20170828182744-c30ac30633cc h1:BCPnHtcboadS0DvysUuJXZ4lWVv5Bh5i7+tbIyi+ck4=
+github.com/whyrusleeping/base32 v0.0.0-20170828182744-c30ac30633cc/go.mod h1:r45hJU7yEoA81k6MWNhpMj/kms0n14dkzkxYHoB96UM=
github.com/whyrusleeping/chunker v0.0.0-20181014151217-fe64bd25879f h1:jQa4QT2UP9WYv2nzyawpKMOCl+Z/jW7djv2/J50lj9E=
+github.com/whyrusleeping/chunker v0.0.0-20181014151217-fe64bd25879f/go.mod h1:p9UJB6dDgdPgMJZs7UjUOdulKyRr9fqkS+6JKAInPy8=
github.com/whyrusleeping/go-keyspace v0.0.0-20160322163242-5b898ac5add1 h1:EKhdznlJHPMoKr0XTrX+IlJs1LH3lyx2nfr1dOlZ79k=
github.com/whyrusleeping/go-keyspace v0.0.0-20160322163242-5b898ac5add1/go.mod h1:8UvriyWtv5Q5EOgjHaSseUEdkQfvwFv1I/In/O2M9gc=
github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM=
@@ -556,25 +584,27 @@ github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1
go.opencensus.io v0.18.0/go.mod h1:vKdFvxhtzZ9onBp9VKHK8z/sRpBMnKAsufL7wlDrCOA=
go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
-go.opentelemetry.io/otel v1.21.0 h1:hzLeKBZEL7Okw2mGzZ0cc4k/A7Fta0uoPgaJCr8fsFc=
-go.opentelemetry.io/otel v1.21.0/go.mod h1:QZzNPQPm1zLX4gZK4cMi+71eaorMSGT3A4znnUvNNEo=
-go.opentelemetry.io/otel/metric v1.21.0 h1:tlYWfeo+Bocx5kLEloTjbcDwBuELRrIFxwdQ36PlJu4=
-go.opentelemetry.io/otel/metric v1.21.0/go.mod h1:o1p3CA8nNHW8j5yuQLdc1eeqEaPfzug24uvsyIEJRWM=
-go.opentelemetry.io/otel/sdk v1.21.0 h1:FTt8qirL1EysG6sTQRZ5TokkU8d0ugCj8htOgThZXQ8=
-go.opentelemetry.io/otel/sdk v1.21.0/go.mod h1:Nna6Yv7PWTdgJHVRD9hIYywQBRx7pbox6nwBnZIxl/E=
-go.opentelemetry.io/otel/sdk/metric v1.21.0 h1:smhI5oD714d6jHE6Tie36fPx4WDFIg+Y6RfAY4ICcR0=
-go.opentelemetry.io/otel/sdk/metric v1.21.0/go.mod h1:FJ8RAsoPGv/wYMgBdUJXOm+6pzFY3YdljnXtv1SBE8Q=
-go.opentelemetry.io/otel/trace v1.21.0 h1:WD9i5gzvoUPuXIXH24ZNBudiarZDKuekPqi/E8fpfLc=
-go.opentelemetry.io/otel/trace v1.21.0/go.mod h1:LGbsEB0f9LGjN+OZaQQ26sohbOmiMR+BaslueVtS/qQ=
+go.opentelemetry.io/otel v1.22.0 h1:xS7Ku+7yTFvDfDraDIJVpw7XPyuHlB9MCiqqX5mcJ6Y=
+go.opentelemetry.io/otel v1.22.0/go.mod h1:eoV4iAi3Ea8LkAEI9+GFT44O6T/D0GWAVFyZVCC6pMI=
+go.opentelemetry.io/otel/metric v1.22.0 h1:lypMQnGyJYeuYPhOM/bgjbFM6WE44W1/T45er4d8Hhg=
+go.opentelemetry.io/otel/metric v1.22.0/go.mod h1:evJGjVpZv0mQ5QBRJoBF64yMuOf4xCWdXjK8pzFvliY=
+go.opentelemetry.io/otel/sdk v1.22.0 h1:6coWHw9xw7EfClIC/+O31R8IY3/+EiRFHevmHafB2Gw=
+go.opentelemetry.io/otel/sdk v1.22.0/go.mod h1:iu7luyVGYovrRpe2fmj3CVKouQNdTOkxtLzPvPz1DOc=
+go.opentelemetry.io/otel/sdk/metric v1.22.0 h1:ARrRetm1HCVxq0cbnaZQlfwODYJHo3gFL8Z3tSmHBcI=
+go.opentelemetry.io/otel/sdk/metric v1.22.0/go.mod h1:KjQGeMIDlBNEOo6HvjhxIec1p/69/kULDcp4gr0oLQQ=
+go.opentelemetry.io/otel/trace v1.22.0 h1:Hg6pPujv0XG9QaVbGOBVHunyuLcCC3jN7WEhPx83XD0=
+go.opentelemetry.io/otel/trace v1.22.0/go.mod h1:RbbHXVqKES9QhzZq/fE5UnOSILqRt40a21sPw2He1xo=
go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ=
go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE=
+go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0=
go.uber.org/dig v1.17.1 h1:Tga8Lz8PcYNsWsyHMZ1Vm0OQOUaJNDyvPImgbAu9YSc=
go.uber.org/dig v1.17.1/go.mod h1:Us0rSJiThwCv2GteUN0Q7OKvU7n5J4dxZ9JKUXozFdE=
go.uber.org/fx v1.20.1 h1:zVwVQGS8zYvhh9Xxcu4w1M6ESyeMzebzj2NbSayZ4Mk=
go.uber.org/fx v1.20.1/go.mod h1:iSYNbHf2y55acNCwCXKx7LbWb5WG1Bnue5RDXz1OREg=
go.uber.org/goleak v1.1.11-0.20210813005559-691160354723/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ=
go.uber.org/goleak v1.2.0 h1:xqgm/S+aQvhWFTtR0XK3Jvg7z8kGV8P4X14IzwN3Eqk=
+go.uber.org/goleak v1.2.0/go.mod h1:XJYK+MuIchqpmGmUSAzotztawfKvYLUIgg7guXrwVUo=
go.uber.org/mock v0.4.0 h1:VcM4ZOtdbR4f6VXfiOpwpVJDL6lCReaZ6mw31wqh7KU=
go.uber.org/mock v0.4.0/go.mod h1:a6FSlNadKUHUa9IP5Vyt1zh4fC7uAwxMutEAscFbkZc=
go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU=
@@ -770,6 +800,7 @@ gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
+gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/http/client_collection.go b/http/client_collection.go
index 95a81df84f..b44f5045fc 100644
--- a/http/client_collection.go
+++ b/http/client_collection.go
@@ -20,6 +20,7 @@ import (
"net/url"
"strings"
+ "github.com/sourcenetwork/immutable"
sse "github.com/vito/go-sse/sse"
"github.com/sourcenetwork/defradb/client"
@@ -39,7 +40,7 @@ func (c *Collection) Description() client.CollectionDescription {
return c.def.Description
}
-func (c *Collection) Name() string {
+func (c *Collection) Name() immutable.Option[string] {
return c.Description().Name
}
@@ -60,7 +61,11 @@ func (c *Collection) Definition() client.CollectionDefinition {
}
func (c *Collection) Create(ctx context.Context, doc *client.Document) error {
- methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name)
+ if !c.Description().Name.HasValue() {
+ return client.ErrOperationNotPermittedOnNamelessCols
+ }
+
+ methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name.Value())
body, err := doc.String()
if err != nil {
@@ -79,7 +84,10 @@ func (c *Collection) Create(ctx context.Context, doc *client.Document) error {
}
func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) error {
- methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name)
+ if !c.Description().Name.HasValue() {
+ return client.ErrOperationNotPermittedOnNamelessCols
+ }
+ methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name.Value())
var docMapList []json.RawMessage
for _, doc := range docs {
@@ -108,7 +116,11 @@ func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) er
}
func (c *Collection) Update(ctx context.Context, doc *client.Document) error {
- methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name, doc.ID().String())
+ if !c.Description().Name.HasValue() {
+ return client.ErrOperationNotPermittedOnNamelessCols
+ }
+
+ methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name.Value(), doc.ID().String())
body, err := doc.ToJSONPatch()
if err != nil {
@@ -138,7 +150,11 @@ func (c *Collection) Save(ctx context.Context, doc *client.Document) error {
}
func (c *Collection) Delete(ctx context.Context, docID client.DocID) (bool, error) {
- methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name, docID.String())
+ if !c.Description().Name.HasValue() {
+ return false, client.ErrOperationNotPermittedOnNamelessCols
+ }
+
+ methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name.Value(), docID.String())
req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), nil)
if err != nil {
@@ -176,7 +192,11 @@ func (c *Collection) updateWith(
ctx context.Context,
request CollectionUpdateRequest,
) (*client.UpdateResult, error) {
- methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name)
+ if !c.Description().Name.HasValue() {
+ return nil, client.ErrOperationNotPermittedOnNamelessCols
+ }
+
+ methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name.Value())
body, err := json.Marshal(request)
if err != nil {
@@ -247,7 +267,11 @@ func (c *Collection) deleteWith(
ctx context.Context,
request CollectionDeleteRequest,
) (*client.DeleteResult, error) {
- methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name)
+ if !c.Description().Name.HasValue() {
+ return nil, client.ErrOperationNotPermittedOnNamelessCols
+ }
+
+ methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name.Value())
body, err := json.Marshal(request)
if err != nil {
@@ -287,12 +311,16 @@ func (c *Collection) DeleteWithDocIDs(ctx context.Context, docIDs []client.DocID
}
func (c *Collection) Get(ctx context.Context, docID client.DocID, showDeleted bool) (*client.Document, error) {
+ if !c.Description().Name.HasValue() {
+ return nil, client.ErrOperationNotPermittedOnNamelessCols
+ }
+
query := url.Values{}
if showDeleted {
query.Add("show_deleted", "true")
}
- methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name, docID.String())
+ methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name.Value(), docID.String())
methodURL.RawQuery = query.Encode()
req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil)
@@ -320,7 +348,11 @@ func (c *Collection) WithTxn(tx datastore.Txn) client.Collection {
}
func (c *Collection) GetAllDocIDs(ctx context.Context) (<-chan client.DocIDResult, error) {
- methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name)
+ if !c.Description().Name.HasValue() {
+ return nil, client.ErrOperationNotPermittedOnNamelessCols
+ }
+
+ methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name.Value())
req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil)
if err != nil {
@@ -372,7 +404,11 @@ func (c *Collection) CreateIndex(
ctx context.Context,
indexDesc client.IndexDescription,
) (client.IndexDescription, error) {
- methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name, "indexes")
+ if !c.Description().Name.HasValue() {
+ return client.IndexDescription{}, client.ErrOperationNotPermittedOnNamelessCols
+ }
+
+ methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name.Value(), "indexes")
body, err := json.Marshal(&indexDesc)
if err != nil {
@@ -390,7 +426,11 @@ func (c *Collection) CreateIndex(
}
func (c *Collection) DropIndex(ctx context.Context, indexName string) error {
- methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name, "indexes", indexName)
+ if !c.Description().Name.HasValue() {
+ return client.ErrOperationNotPermittedOnNamelessCols
+ }
+
+ methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name.Value(), "indexes", indexName)
req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), nil)
if err != nil {
@@ -401,7 +441,11 @@ func (c *Collection) DropIndex(ctx context.Context, indexName string) error {
}
func (c *Collection) GetIndexes(ctx context.Context) ([]client.IndexDescription, error) {
- methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name, "indexes")
+ if !c.Description().Name.HasValue() {
+ return nil, client.ErrOperationNotPermittedOnNamelessCols
+ }
+
+ methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name.Value(), "indexes")
req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil)
if err != nil {
diff --git a/http/handler.go b/http/handler.go
index 1df8987964..328ea8fab9 100644
--- a/http/handler.go
+++ b/http/handler.go
@@ -29,15 +29,7 @@ var Version string = "v0"
// playgroundHandler is set when building with the playground build tag
var playgroundHandler http.Handler = http.HandlerFunc(http.NotFound)
-type Handler struct {
- db client.DB
- mux *chi.Mux
- txs *sync.Map
-}
-
-func NewHandler(db client.DB, opts ServerOptions) (*Handler, error) {
- txs := &sync.Map{}
-
+func NewApiRouter() (*Router, error) {
tx_handler := &txHandler{}
store_handler := &storeHandler{}
collection_handler := &collectionHandler{}
@@ -50,12 +42,6 @@ func NewHandler(db client.DB, opts ServerOptions) (*Handler, error) {
return nil, err
}
- router.AddMiddleware(
- ApiMiddleware(db, txs, opts),
- TransactionMiddleware,
- StoreMiddleware,
- )
-
tx_handler.bindRoutes(router)
store_handler.bindRoutes(router)
p2p_handler.bindRoutes(router)
@@ -74,6 +60,21 @@ func NewHandler(db client.DB, opts ServerOptions) (*Handler, error) {
if err := router.Validate(context.Background()); err != nil {
return nil, err
}
+ return router, nil
+}
+
+type Handler struct {
+ db client.DB
+ mux *chi.Mux
+ txs *sync.Map
+}
+
+func NewHandler(db client.DB, opts ServerOptions) (*Handler, error) {
+ router, err := NewApiRouter()
+ if err != nil {
+ return nil, err
+ }
+ txs := &sync.Map{}
mux := chi.NewMux()
mux.Use(
@@ -81,7 +82,14 @@ func NewHandler(db client.DB, opts ServerOptions) (*Handler, error) {
middleware.Recoverer,
CorsMiddleware(opts),
)
- mux.Mount("/api/"+Version, router)
+ mux.Route("/api/"+Version, func(r chi.Router) {
+ r.Use(
+ ApiMiddleware(db, txs, opts),
+ TransactionMiddleware,
+ StoreMiddleware,
+ )
+ r.Handle("/*", router)
+ })
mux.Get("/openapi.json", func(rw http.ResponseWriter, req *http.Request) {
responseJSON(rw, http.StatusOK, router.OpenAPI())
})
diff --git a/net/peer_replicator.go b/net/peer_replicator.go
index 0506e018c4..8756959db8 100644
--- a/net/peer_replicator.go
+++ b/net/peer_replicator.go
@@ -94,7 +94,7 @@ func (p *Peer) SetReplicator(ctx context.Context, rep client.Replicator) error {
for _, col := range added {
keysCh, err := col.WithTxn(txn).GetAllDocIDs(ctx)
if err != nil {
- return NewErrReplicatorDocID(err, col.Name(), rep.Info.ID)
+ return NewErrReplicatorDocID(err, col.Name().Value(), rep.Info.ID)
}
p.pushToReplicator(ctx, txn, col, keysCh, rep.Info.ID)
}
diff --git a/planner/datasource.go b/planner/datasource.go
index 6cfb8cf728..cc0bb0a019 100644
--- a/planner/datasource.go
+++ b/planner/datasource.go
@@ -32,7 +32,7 @@ func (p *Planner) getCollectionScanPlan(mapperSelect *mapper.Select) (planSource
}
var plan planNode
- if col.Description().BaseQuery != nil {
+ if len(col.Description().QuerySources()) > 0 {
var err error
plan, err = p.View(mapperSelect, col.Description())
if err != nil {
diff --git a/planner/filter/copy_field.go b/planner/filter/copy_field.go
index 70b5dc2956..fff974da06 100644
--- a/planner/filter/copy_field.go
+++ b/planner/filter/copy_field.go
@@ -16,6 +16,7 @@ import (
// CopyField copies the given field from the provided filter.
// Multiple fields can be passed to copy related objects with a certain field.
+// In this case every subsequent field is a sub field of the previous one. Eg. book.author.name
// The result filter preserves the structure of the original filter.
func CopyField(filter *mapper.Filter, fields ...mapper.Field) *mapper.Filter {
if filter == nil || len(fields) == 0 {
diff --git a/planner/filter/copy_field_test.go b/planner/filter/copy_field_test.go
index 1714db55b6..611f1d1fd8 100644
--- a/planner/filter/copy_field_test.go
+++ b/planner/filter/copy_field_test.go
@@ -120,12 +120,12 @@ func TestCopyField(t *testing.T) {
}
}
-func TestCopyFieldOfNullFilter(t *testing.T) {
+func TestCopyField_IfFilterIsNil_NoOp(t *testing.T) {
actualFilter := CopyField(nil, mapper.Field{Index: 1})
assert.Nil(t, actualFilter)
}
-func TestCopyFieldWithNoFieldGiven(t *testing.T) {
+func TestCopyField_IfNoFieldGiven_NoOp(t *testing.T) {
filter := mapper.NewFilter()
filter.Conditions = map[connor.FilterKey]any{
&mapper.PropertyIndex{Index: 0}: &mapper.Operator{Operation: "_eq"},
@@ -133,3 +133,18 @@ func TestCopyFieldWithNoFieldGiven(t *testing.T) {
actualFilter := CopyField(filter)
assert.Nil(t, actualFilter)
}
+
+func TestCopyField_IfSecondFieldIsNotSubField_NoOp(t *testing.T) {
+ mapping := getDocMapping()
+ inputFilter := mapper.ToFilter(request.Filter{Conditions: map[string]any{
+ "name": m("_eq", "John"),
+ "age": m("_gt", 55),
+ }}, mapping)
+
+ var actualFilter *mapper.Filter
+ assert.NotPanics(t, func() {
+ actualFilter = CopyField(inputFilter, mapper.Field{Index: authorNameInd}, mapper.Field{Index: 666})
+ })
+
+ assert.Nil(t, actualFilter)
+}
diff --git a/planner/filter/normalize.go b/planner/filter/normalize.go
index 181b1f8485..65317f2170 100644
--- a/planner/filter/normalize.go
+++ b/planner/filter/normalize.go
@@ -185,7 +185,12 @@ func normalizeProperties(parentKey connor.FilterKey, conditions []any) []any {
// if canMergeAnd is true, all _and groups will be merged
props := make(map[int][]any)
for _, c := range conditions {
- for key, val := range c.(map[connor.FilterKey]any) {
+ cMap, ok := c.(map[connor.FilterKey]any)
+ if !ok {
+ result = append(result, c)
+ continue
+ }
+ for key, val := range cMap {
op, ok := key.(*mapper.Operator)
if canMergeAnd && ok && op.Operation == request.FilterOpAnd {
merge = append(merge, val.([]any)...)
diff --git a/planner/filter/split.go b/planner/filter/split.go
index 1ef153746b..e562c8165a 100644
--- a/planner/filter/split.go
+++ b/planner/filter/split.go
@@ -13,7 +13,9 @@ import (
"github.com/sourcenetwork/defradb/planner/mapper"
)
-// SplitByField splits the provided filter into 2 filters based on field.
+// SplitByFields splits the provided filter into 2 filters based on fields.
+// It extracts the conditions that apply to the provided fields and returns them
+// as the second returned filter.
// It can be used for extracting a supType
// Eg. (filter: {age: 10, name: "bob", author: {birthday: "June 26, 1990", ...}, ...})
//
@@ -22,13 +24,26 @@ import (
//
// And the subType filter is the conditions that apply to the queried sub type
// ie: {birthday: "June 26, 1990", ...}.
-func SplitByField(filter *mapper.Filter, field mapper.Field) (*mapper.Filter, *mapper.Filter) {
+func SplitByFields(filter *mapper.Filter, fields ...mapper.Field) (*mapper.Filter, *mapper.Filter) {
if filter == nil {
return nil, nil
}
- splitF := CopyField(filter, field)
- RemoveField(filter, field)
+ if len(fields) == 0 {
+ return filter, nil
+ }
+
+ splitF := CopyField(filter, fields[0])
+ RemoveField(filter, fields[0])
+
+ for _, field := range fields[1:] {
+ newSplitF := CopyField(filter, field)
+ if newSplitF == nil {
+ continue
+ }
+ splitF.Conditions = Merge(splitF.Conditions, newSplitF.Conditions)
+ RemoveField(filter, field)
+ }
if len(filter.Conditions) == 0 {
filter = nil
diff --git a/planner/filter/split_test.go b/planner/filter/split_test.go
index 86fbb0b44a..221bd31527 100644
--- a/planner/filter/split_test.go
+++ b/planner/filter/split_test.go
@@ -21,7 +21,7 @@ import (
func TestSplitFilter(t *testing.T) {
tests := []struct {
name string
- inputField mapper.Field
+ inputFields []mapper.Field
inputFilter map[string]any
expectedFilter1 map[string]any
expectedFilter2 map[string]any
@@ -32,7 +32,7 @@ func TestSplitFilter(t *testing.T) {
"name": m("_eq", "John"),
"age": m("_gt", 55),
},
- inputField: mapper.Field{Index: authorAgeInd},
+ inputFields: []mapper.Field{{Index: authorAgeInd}},
expectedFilter1: m("name", m("_eq", "John")),
expectedFilter2: m("age", m("_gt", 55)),
},
@@ -41,7 +41,7 @@ func TestSplitFilter(t *testing.T) {
inputFilter: map[string]any{
"age": m("_gt", 55),
},
- inputField: mapper.Field{Index: authorAgeInd},
+ inputFields: []mapper.Field{{Index: authorAgeInd}},
expectedFilter1: nil,
expectedFilter2: m("age", m("_gt", 55)),
},
@@ -50,17 +50,68 @@ func TestSplitFilter(t *testing.T) {
inputFilter: map[string]any{
"name": m("_eq", "John"),
},
- inputField: mapper.Field{Index: authorAgeInd},
+ inputFields: []mapper.Field{{Index: authorAgeInd}},
expectedFilter1: m("name", m("_eq", "John")),
expectedFilter2: nil,
},
+ {
+ name: "split by 2 fields",
+ inputFilter: map[string]any{
+ "name": m("_eq", "John"),
+ "age": m("_gt", 55),
+ "published": m("_eq", true),
+ "verified": m("_eq", false),
+ },
+ inputFields: []mapper.Field{{Index: authorNameInd}, {Index: authorAgeInd}, {Index: authorVerifiedInd}},
+ expectedFilter1: m("published", m("_eq", true)),
+ expectedFilter2: map[string]any{
+ "name": m("_eq", "John"),
+ "age": m("_gt", 55),
+ "verified": m("_eq", false),
+ },
+ },
+ {
+ name: "split by fields that are not present",
+ inputFilter: map[string]any{
+ "name": m("_eq", "John"),
+ "age": m("_gt", 55),
+ "verified": m("_eq", false),
+ },
+ inputFields: []mapper.Field{
+ {Index: authorNameInd},
+ {Index: 100},
+ {Index: authorAgeInd},
+ {Index: 200},
+ },
+ expectedFilter1: m("verified", m("_eq", false)),
+ expectedFilter2: map[string]any{
+ "name": m("_eq", "John"),
+ "age": m("_gt", 55),
+ },
+ },
+ {
+ name: "filter with two []any slices",
+ inputFilter: map[string]any{
+ "age": m("_in", []any{10, 20, 30}),
+ "name": m("_in", []any{"John", "Bob"}),
+ },
+ inputFields: []mapper.Field{
+ {Index: authorNameInd},
+ {Index: authorAgeInd},
+ },
+ expectedFilter1: nil,
+ expectedFilter2: map[string]any{
+ "age": m("_in", []any{10, 20, 30}),
+ "name": m("_in", []any{"John", "Bob"}),
+ },
+ },
}
mapping := getDocMapping()
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
inputFilter := mapper.ToFilter(request.Filter{Conditions: test.inputFilter}, mapping)
- actualFilter1, actualFilter2 := SplitByField(inputFilter, test.inputField)
+ actualFilter1, actualFilter2 := SplitByFields(inputFilter, test.inputFields...)
expectedFilter1 := mapper.ToFilter(request.Filter{Conditions: test.expectedFilter1}, mapping)
expectedFilter2 := mapper.ToFilter(request.Filter{Conditions: test.expectedFilter2}, mapping)
if expectedFilter1 != nil || actualFilter1 != nil {
@@ -73,8 +124,20 @@ func TestSplitFilter(t *testing.T) {
}
}
+func TestSplitFilter_WithNoFields_ReturnsInputFilter(t *testing.T) {
+ mapping := getDocMapping()
+ inputFilterConditions := map[string]any{
+ "name": m("_eq", "John"),
+ "age": m("_gt", 55),
+ }
+ inputFilter := mapper.ToFilter(request.Filter{Conditions: inputFilterConditions}, mapping)
+ actualFilter1, actualFilter2 := SplitByFields(inputFilter)
+ AssertEqualFilterMap(t, inputFilter.Conditions, actualFilter1.Conditions)
+ assert.Nil(t, actualFilter2)
+}
+
func TestSplitNullFilter(t *testing.T) {
- actualFilter1, actualFilter2 := SplitByField(nil, mapper.Field{Index: authorAgeInd})
+ actualFilter1, actualFilter2 := SplitByFields(nil, mapper.Field{Index: authorAgeInd})
assert.Nil(t, actualFilter1)
assert.Nil(t, actualFilter2)
}
diff --git a/planner/planner.go b/planner/planner.go
index 5a87983947..3ef8ff28e3 100644
--- a/planner/planner.go
+++ b/planner/planner.go
@@ -342,18 +342,17 @@ func (p *Planner) tryOptimizeJoinDirection(node *invertibleTypeJoin, parentPlan
)
slct := node.subType.(*selectTopNode).selectNode
desc := slct.collection.Description()
- schema := slct.collection.Schema()
- indexedFields := desc.CollectIndexedFields(&schema)
- for _, indField := range indexedFields {
- if ind, ok := filteredSubFields[indField.Name]; ok {
+ for subFieldName, subFieldInd := range filteredSubFields {
+ indexes := desc.GetIndexesOnField(subFieldName)
+ if len(indexes) > 0 {
subInd := node.documentMapping.FirstIndexOfName(node.subTypeName)
relatedField := mapper.Field{Name: node.subTypeName, Index: subInd}
fieldFilter := filter.UnwrapRelation(filter.CopyField(
parentPlan.selectNode.filter,
relatedField,
- mapper.Field{Name: indField.Name, Index: ind},
+ mapper.Field{Name: subFieldName, Index: subFieldInd},
), relatedField)
- err := node.invertJoinDirectionWithIndex(fieldFilter, indField)
+ err := node.invertJoinDirectionWithIndex(fieldFilter, indexes[0])
if err != nil {
return err
}
diff --git a/planner/scan.go b/planner/scan.go
index 19ae079f5f..a11f18de5e 100644
--- a/planner/scan.go
+++ b/planner/scan.go
@@ -138,7 +138,7 @@ func (n *scanNode) tryAddField(fieldName string) bool {
func (scan *scanNode) initFetcher(
cid immutable.Option[string],
- indexedField immutable.Option[client.FieldDescription],
+ index immutable.Option[client.IndexDescription],
) {
var f fetcher.Fetcher
if cid.HasValue() {
@@ -146,14 +146,17 @@ func (scan *scanNode) initFetcher(
} else {
f = new(fetcher.DocumentFetcher)
- if indexedField.HasValue() {
- typeIndex := scan.documentMapping.FirstIndexOfName(indexedField.Value().Name)
- field := mapper.Field{Index: typeIndex, Name: indexedField.Value().Name}
+ if index.HasValue() {
+ fields := make([]mapper.Field, 0, len(index.Value().Fields))
+ for _, field := range index.Value().Fields {
+ fieldName := field.Name
+ typeIndex := scan.documentMapping.FirstIndexOfName(fieldName)
+ fields = append(fields, mapper.Field{Index: typeIndex, Name: fieldName})
+ }
var indexFilter *mapper.Filter
- scan.filter, indexFilter = filter.SplitByField(scan.filter, field)
+ scan.filter, indexFilter = filter.SplitByFields(scan.filter, fields...)
if indexFilter != nil {
- fieldDesc, _ := scan.col.Schema().GetField(indexedField.Value().Name)
- f = fetcher.NewIndexFetcher(f, fieldDesc, indexFilter)
+ f = fetcher.NewIndexFetcher(f, index.Value(), indexFilter)
}
}
@@ -252,7 +255,7 @@ func (n *scanNode) simpleExplain() (map[string]any, error) {
}
// Add the collection attributes.
- simpleExplainMap[collectionNameLabel] = n.col.Name()
+ simpleExplainMap[collectionNameLabel] = n.col.Name().Value()
simpleExplainMap[collectionIDLabel] = n.col.Description().IDString()
// Add the spans attribute.
diff --git a/planner/select.go b/planner/select.go
index f1d85de9f3..ce7ff19030 100644
--- a/planner/select.go
+++ b/planner/select.go
@@ -290,26 +290,29 @@ func (n *selectNode) initSource() ([]aggregateNode, error) {
}
if isScanNode {
- origScan.initFetcher(n.selectReq.Cid, findFilteredByIndexedField(origScan))
+ origScan.initFetcher(n.selectReq.Cid, findIndexByFilteringField(origScan))
}
return aggregates, nil
}
-func findFilteredByIndexedField(scanNode *scanNode) immutable.Option[client.FieldDescription] {
- if scanNode.filter != nil {
- schema := scanNode.col.Schema()
- indexedFields := scanNode.col.Description().CollectIndexedFields(&schema)
- for i := range indexedFields {
- typeIndex := scanNode.documentMapping.FirstIndexOfName(indexedFields[i].Name)
- if scanNode.filter.HasIndex(typeIndex) {
- // we return the first found indexed field to keep it simple for now
- // more sophisticated optimization logic can be added later
- return immutable.Some(indexedFields[i])
- }
+func findIndexByFilteringField(scanNode *scanNode) immutable.Option[client.IndexDescription] {
+ if scanNode.filter == nil {
+ return immutable.None[client.IndexDescription]()
+ }
+ colDesc := scanNode.col.Description()
+
+ for _, field := range scanNode.col.Schema().Fields {
+ if _, isFiltered := scanNode.filter.ExternalConditions[field.Name]; !isFiltered {
+ continue
+ }
+ indexes := colDesc.GetIndexesOnField(field.Name)
+ if len(indexes) > 0 {
+ // we return the first found index. We will optimize it later.
+ return immutable.Some(indexes[0])
}
}
- return immutable.None[client.FieldDescription]()
+ return immutable.None[client.IndexDescription]()
}
func (n *selectNode) initFields(selectReq *mapper.Select) ([]aggregateNode, error) {
@@ -373,8 +376,9 @@ func (n *selectNode) initFields(selectReq *mapper.Select) ([]aggregateNode, erro
// commit query link fields are always added and need no special treatment here
// WARNING: It is important to check collection name is nil and the parent select name
// here else we risk falsely identifying user defined fields with the name `links` as a commit links field
- } else if n.collection.Description().BaseQuery == nil {
- // Views only contain embedded objects and don't require a traditional join here
+ } else if !(n.collection != nil && len(n.collection.Description().QuerySources()) > 0) {
+ // Collections sourcing data from queries only contain embedded objects and don't require
+ // a traditional join here
err := n.addTypeIndexJoin(f)
if err != nil {
return nil, err
diff --git a/planner/type_join.go b/planner/type_join.go
index fc4e6009cf..9d9a27b969 100644
--- a/planner/type_join.go
+++ b/planner/type_join.go
@@ -261,7 +261,7 @@ func (p *Planner) makeTypeJoinOne(
subTypeField, subTypeFieldNameFound := subTypeCol.Description().GetFieldByRelation(
subTypeFieldDesc.RelationName,
- parent.collection.Name(),
+ parent.collection.Name().Value(),
subTypeFieldDesc.Name,
&subTypeSchema,
)
@@ -359,7 +359,7 @@ func prepareScanNodeFilterForTypeJoin(
filter.RemoveField(scan.filter, subType.Field)
} else {
var parentFilter *mapper.Filter
- scan.filter, parentFilter = filter.SplitByField(scan.filter, subType.Field)
+ scan.filter, parentFilter = filter.SplitByFields(scan.filter, subType.Field)
if parentFilter != nil {
if parent.filter == nil {
parent.filter = parentFilter
@@ -396,7 +396,7 @@ func (p *Planner) makeTypeJoinMany(
rootField, rootNameFound := subTypeCol.Description().GetFieldByRelation(
subTypeFieldDesc.RelationName,
- parent.collection.Name(),
+ parent.collection.Name().Value(),
subTypeFieldDesc.Name,
&subTypeSchema,
)
@@ -606,12 +606,12 @@ func (join *invertibleTypeJoin) Next() (bool, error) {
func (join *invertibleTypeJoin) invertJoinDirectionWithIndex(
fieldFilter *mapper.Filter,
- field client.FieldDescription,
+ index client.IndexDescription,
) error {
subScan := getScanNode(join.subType)
subScan.tryAddField(join.rootName + request.RelatedObjectID)
subScan.filter = fieldFilter
- subScan.initFetcher(immutable.Option[string]{}, immutable.Some(field))
+ subScan.initFetcher(immutable.Option[string]{}, immutable.Some(index))
join.invert()
diff --git a/planner/view.go b/planner/view.go
index 48a026f306..f7b015becf 100644
--- a/planner/view.go
+++ b/planner/view.go
@@ -26,7 +26,9 @@ type viewNode struct {
}
func (p *Planner) View(query *mapper.Select, desc client.CollectionDescription) (*viewNode, error) {
- m, err := mapper.ToSelect(p.ctx, p.db, desc.BaseQuery)
+ baseQuery := (desc.Sources[0].(*client.QuerySource)).Query
+
+ m, err := mapper.ToSelect(p.ctx, p.db, &baseQuery)
if err != nil {
return nil, err
}
diff --git a/playground/package-lock.json b/playground/package-lock.json
index a38ae85877..a2a2dd122e 100644
--- a/playground/package-lock.json
+++ b/playground/package-lock.json
@@ -18,14 +18,14 @@
"@types/react": "^18.2.48",
"@types/react-dom": "^18.2.18",
"@types/swagger-ui-react": "^4.18.3",
- "@typescript-eslint/eslint-plugin": "^6.18.1",
- "@typescript-eslint/parser": "^6.18.1",
+ "@typescript-eslint/eslint-plugin": "^6.19.1",
+ "@typescript-eslint/parser": "^6.19.1",
"@vitejs/plugin-react-swc": "^3.5.0",
"eslint": "^8.56.0",
"eslint-plugin-react-hooks": "^4.6.0",
"eslint-plugin-react-refresh": "^0.4.5",
"typescript": "^5.3.3",
- "vite": "^5.0.11"
+ "vite": "^5.0.12"
}
},
"node_modules/@aashutoshrathi/word-wrap": {
@@ -2334,16 +2334,16 @@
"integrity": "sha512-EwmlvuaxPNej9+T4v5AuBPJa2x2UOJVdjCtDHgcDqitUeOtjnJKJ+apYjVcAoBEMjKW1VVFGZLUb5+qqa09XFA=="
},
"node_modules/@typescript-eslint/eslint-plugin": {
- "version": "6.18.1",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.18.1.tgz",
- "integrity": "sha512-nISDRYnnIpk7VCFrGcu1rnZfM1Dh9LRHnfgdkjcbi/l7g16VYRri3TjXi9Ir4lOZSw5N/gnV/3H7jIPQ8Q4daA==",
+ "version": "6.19.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.19.1.tgz",
+ "integrity": "sha512-roQScUGFruWod9CEyoV5KlCYrubC/fvG8/1zXuT0WTcxX87GnMMmnksMwSg99lo1xiKrBzw2icsJPMAw1OtKxg==",
"dev": true,
"dependencies": {
"@eslint-community/regexpp": "^4.5.1",
- "@typescript-eslint/scope-manager": "6.18.1",
- "@typescript-eslint/type-utils": "6.18.1",
- "@typescript-eslint/utils": "6.18.1",
- "@typescript-eslint/visitor-keys": "6.18.1",
+ "@typescript-eslint/scope-manager": "6.19.1",
+ "@typescript-eslint/type-utils": "6.19.1",
+ "@typescript-eslint/utils": "6.19.1",
+ "@typescript-eslint/visitor-keys": "6.19.1",
"debug": "^4.3.4",
"graphemer": "^1.4.0",
"ignore": "^5.2.4",
@@ -2368,16 +2368,63 @@
}
}
},
+ "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/scope-manager": {
+ "version": "6.19.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.19.1.tgz",
+ "integrity": "sha512-4CdXYjKf6/6aKNMSly/BP4iCSOpvMmqtDzRtqFyyAae3z5kkqEjKndR5vDHL8rSuMIIWP8u4Mw4VxLyxZW6D5w==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "6.19.1",
+ "@typescript-eslint/visitor-keys": "6.19.1"
+ },
+ "engines": {
+ "node": "^16.0.0 || >=18.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/types": {
+ "version": "6.19.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.19.1.tgz",
+ "integrity": "sha512-6+bk6FEtBhvfYvpHsDgAL3uo4BfvnTnoge5LrrCj2eJN8g3IJdLTD4B/jK3Q6vo4Ql/Hoip9I8aB6fF+6RfDqg==",
+ "dev": true,
+ "engines": {
+ "node": "^16.0.0 || >=18.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/visitor-keys": {
+ "version": "6.19.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.19.1.tgz",
+ "integrity": "sha512-gkdtIO+xSO/SmI0W68DBg4u1KElmIUo3vXzgHyGPs6cxgB0sa3TlptRAAE0hUY1hM6FcDKEv7aIwiTGm76cXfQ==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "6.19.1",
+ "eslint-visitor-keys": "^3.4.1"
+ },
+ "engines": {
+ "node": "^16.0.0 || >=18.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
"node_modules/@typescript-eslint/parser": {
- "version": "6.18.1",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.18.1.tgz",
- "integrity": "sha512-zct/MdJnVaRRNy9e84XnVtRv9Vf91/qqe+hZJtKanjojud4wAVy/7lXxJmMyX6X6J+xc6c//YEWvpeif8cAhWA==",
+ "version": "6.19.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.19.1.tgz",
+ "integrity": "sha512-WEfX22ziAh6pRE9jnbkkLGp/4RhTpffr2ZK5bJ18M8mIfA8A+k97U9ZyaXCEJRlmMHh7R9MJZWXp/r73DzINVQ==",
"dev": true,
"dependencies": {
- "@typescript-eslint/scope-manager": "6.18.1",
- "@typescript-eslint/types": "6.18.1",
- "@typescript-eslint/typescript-estree": "6.18.1",
- "@typescript-eslint/visitor-keys": "6.18.1",
+ "@typescript-eslint/scope-manager": "6.19.1",
+ "@typescript-eslint/types": "6.19.1",
+ "@typescript-eslint/typescript-estree": "6.19.1",
+ "@typescript-eslint/visitor-keys": "6.19.1",
"debug": "^4.3.4"
},
"engines": {
@@ -2396,6 +2443,105 @@
}
}
},
+ "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": {
+ "version": "6.19.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.19.1.tgz",
+ "integrity": "sha512-4CdXYjKf6/6aKNMSly/BP4iCSOpvMmqtDzRtqFyyAae3z5kkqEjKndR5vDHL8rSuMIIWP8u4Mw4VxLyxZW6D5w==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "6.19.1",
+ "@typescript-eslint/visitor-keys": "6.19.1"
+ },
+ "engines": {
+ "node": "^16.0.0 || >=18.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": {
+ "version": "6.19.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.19.1.tgz",
+ "integrity": "sha512-6+bk6FEtBhvfYvpHsDgAL3uo4BfvnTnoge5LrrCj2eJN8g3IJdLTD4B/jK3Q6vo4Ql/Hoip9I8aB6fF+6RfDqg==",
+ "dev": true,
+ "engines": {
+ "node": "^16.0.0 || >=18.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": {
+ "version": "6.19.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.19.1.tgz",
+ "integrity": "sha512-aFdAxuhzBFRWhy+H20nYu19+Km+gFfwNO4TEqyszkMcgBDYQjmPJ61erHxuT2ESJXhlhrO7I5EFIlZ+qGR8oVA==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "6.19.1",
+ "@typescript-eslint/visitor-keys": "6.19.1",
+ "debug": "^4.3.4",
+ "globby": "^11.1.0",
+ "is-glob": "^4.0.3",
+ "minimatch": "9.0.3",
+ "semver": "^7.5.4",
+ "ts-api-utils": "^1.0.1"
+ },
+ "engines": {
+ "node": "^16.0.0 || >=18.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependenciesMeta": {
+ "typescript": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": {
+ "version": "6.19.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.19.1.tgz",
+ "integrity": "sha512-gkdtIO+xSO/SmI0W68DBg4u1KElmIUo3vXzgHyGPs6cxgB0sa3TlptRAAE0hUY1hM6FcDKEv7aIwiTGm76cXfQ==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "6.19.1",
+ "eslint-visitor-keys": "^3.4.1"
+ },
+ "engines": {
+ "node": "^16.0.0 || >=18.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/parser/node_modules/brace-expansion": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
+ "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
+ "dev": true,
+ "dependencies": {
+ "balanced-match": "^1.0.0"
+ }
+ },
+ "node_modules/@typescript-eslint/parser/node_modules/minimatch": {
+ "version": "9.0.3",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz",
+ "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==",
+ "dev": true,
+ "dependencies": {
+ "brace-expansion": "^2.0.1"
+ },
+ "engines": {
+ "node": ">=16 || 14 >=14.17"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
"node_modules/@typescript-eslint/scope-manager": {
"version": "6.18.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.18.1.tgz",
@@ -2414,13 +2560,13 @@
}
},
"node_modules/@typescript-eslint/type-utils": {
- "version": "6.18.1",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.18.1.tgz",
- "integrity": "sha512-wyOSKhuzHeU/5pcRDP2G2Ndci+4g653V43gXTpt4nbyoIOAASkGDA9JIAgbQCdCkcr1MvpSYWzxTz0olCn8+/Q==",
+ "version": "6.19.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.19.1.tgz",
+ "integrity": "sha512-0vdyld3ecfxJuddDjACUvlAeYNrHP/pDeQk2pWBR2ESeEzQhg52DF53AbI9QCBkYE23lgkhLCZNkHn2hEXXYIg==",
"dev": true,
"dependencies": {
- "@typescript-eslint/typescript-estree": "6.18.1",
- "@typescript-eslint/utils": "6.18.1",
+ "@typescript-eslint/typescript-estree": "6.19.1",
+ "@typescript-eslint/utils": "6.19.1",
"debug": "^4.3.4",
"ts-api-utils": "^1.0.1"
},
@@ -2440,6 +2586,88 @@
}
}
},
+ "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/types": {
+ "version": "6.19.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.19.1.tgz",
+ "integrity": "sha512-6+bk6FEtBhvfYvpHsDgAL3uo4BfvnTnoge5LrrCj2eJN8g3IJdLTD4B/jK3Q6vo4Ql/Hoip9I8aB6fF+6RfDqg==",
+ "dev": true,
+ "engines": {
+ "node": "^16.0.0 || >=18.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/typescript-estree": {
+ "version": "6.19.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.19.1.tgz",
+ "integrity": "sha512-aFdAxuhzBFRWhy+H20nYu19+Km+gFfwNO4TEqyszkMcgBDYQjmPJ61erHxuT2ESJXhlhrO7I5EFIlZ+qGR8oVA==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "6.19.1",
+ "@typescript-eslint/visitor-keys": "6.19.1",
+ "debug": "^4.3.4",
+ "globby": "^11.1.0",
+ "is-glob": "^4.0.3",
+ "minimatch": "9.0.3",
+ "semver": "^7.5.4",
+ "ts-api-utils": "^1.0.1"
+ },
+ "engines": {
+ "node": "^16.0.0 || >=18.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependenciesMeta": {
+ "typescript": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/visitor-keys": {
+ "version": "6.19.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.19.1.tgz",
+ "integrity": "sha512-gkdtIO+xSO/SmI0W68DBg4u1KElmIUo3vXzgHyGPs6cxgB0sa3TlptRAAE0hUY1hM6FcDKEv7aIwiTGm76cXfQ==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "6.19.1",
+ "eslint-visitor-keys": "^3.4.1"
+ },
+ "engines": {
+ "node": "^16.0.0 || >=18.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/type-utils/node_modules/brace-expansion": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
+ "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
+ "dev": true,
+ "dependencies": {
+ "balanced-match": "^1.0.0"
+ }
+ },
+ "node_modules/@typescript-eslint/type-utils/node_modules/minimatch": {
+ "version": "9.0.3",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz",
+ "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==",
+ "dev": true,
+ "dependencies": {
+ "brace-expansion": "^2.0.1"
+ },
+ "engines": {
+ "node": ">=16 || 14 >=14.17"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
"node_modules/@typescript-eslint/types": {
"version": "6.18.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.18.1.tgz",
@@ -2506,17 +2734,17 @@
}
},
"node_modules/@typescript-eslint/utils": {
- "version": "6.18.1",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.18.1.tgz",
- "integrity": "sha512-zZmTuVZvD1wpoceHvoQpOiewmWu3uP9FuTWo8vqpy2ffsmfCE8mklRPi+vmnIYAIk9t/4kOThri2QCDgor+OpQ==",
+ "version": "6.19.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.19.1.tgz",
+ "integrity": "sha512-JvjfEZuP5WoMqwh9SPAPDSHSg9FBHHGhjPugSRxu5jMfjvBpq5/sGTD+9M9aQ5sh6iJ8AY/Kk/oUYVEMAPwi7w==",
"dev": true,
"dependencies": {
"@eslint-community/eslint-utils": "^4.4.0",
"@types/json-schema": "^7.0.12",
"@types/semver": "^7.5.0",
- "@typescript-eslint/scope-manager": "6.18.1",
- "@typescript-eslint/types": "6.18.1",
- "@typescript-eslint/typescript-estree": "6.18.1",
+ "@typescript-eslint/scope-manager": "6.19.1",
+ "@typescript-eslint/types": "6.19.1",
+ "@typescript-eslint/typescript-estree": "6.19.1",
"semver": "^7.5.4"
},
"engines": {
@@ -2530,6 +2758,105 @@
"eslint": "^7.0.0 || ^8.0.0"
}
},
+ "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/scope-manager": {
+ "version": "6.19.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.19.1.tgz",
+ "integrity": "sha512-4CdXYjKf6/6aKNMSly/BP4iCSOpvMmqtDzRtqFyyAae3z5kkqEjKndR5vDHL8rSuMIIWP8u4Mw4VxLyxZW6D5w==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "6.19.1",
+ "@typescript-eslint/visitor-keys": "6.19.1"
+ },
+ "engines": {
+ "node": "^16.0.0 || >=18.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/types": {
+ "version": "6.19.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.19.1.tgz",
+ "integrity": "sha512-6+bk6FEtBhvfYvpHsDgAL3uo4BfvnTnoge5LrrCj2eJN8g3IJdLTD4B/jK3Q6vo4Ql/Hoip9I8aB6fF+6RfDqg==",
+ "dev": true,
+ "engines": {
+ "node": "^16.0.0 || >=18.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/typescript-estree": {
+ "version": "6.19.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.19.1.tgz",
+ "integrity": "sha512-aFdAxuhzBFRWhy+H20nYu19+Km+gFfwNO4TEqyszkMcgBDYQjmPJ61erHxuT2ESJXhlhrO7I5EFIlZ+qGR8oVA==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "6.19.1",
+ "@typescript-eslint/visitor-keys": "6.19.1",
+ "debug": "^4.3.4",
+ "globby": "^11.1.0",
+ "is-glob": "^4.0.3",
+ "minimatch": "9.0.3",
+ "semver": "^7.5.4",
+ "ts-api-utils": "^1.0.1"
+ },
+ "engines": {
+ "node": "^16.0.0 || >=18.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependenciesMeta": {
+ "typescript": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/visitor-keys": {
+ "version": "6.19.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.19.1.tgz",
+ "integrity": "sha512-gkdtIO+xSO/SmI0W68DBg4u1KElmIUo3vXzgHyGPs6cxgB0sa3TlptRAAE0hUY1hM6FcDKEv7aIwiTGm76cXfQ==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "6.19.1",
+ "eslint-visitor-keys": "^3.4.1"
+ },
+ "engines": {
+ "node": "^16.0.0 || >=18.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/utils/node_modules/brace-expansion": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
+ "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
+ "dev": true,
+ "dependencies": {
+ "balanced-match": "^1.0.0"
+ }
+ },
+ "node_modules/@typescript-eslint/utils/node_modules/minimatch": {
+ "version": "9.0.3",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz",
+ "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==",
+ "dev": true,
+ "dependencies": {
+ "brace-expansion": "^2.0.1"
+ },
+ "engines": {
+ "node": ">=16 || 14 >=14.17"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
"node_modules/@typescript-eslint/visitor-keys": {
"version": "6.18.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.18.1.tgz",
@@ -5952,9 +6279,9 @@
"optional": true
},
"node_modules/vite": {
- "version": "5.0.11",
- "resolved": "https://registry.npmjs.org/vite/-/vite-5.0.11.tgz",
- "integrity": "sha512-XBMnDjZcNAw/G1gEiskiM1v6yzM4GE5aMGvhWTlHAYYhxb7S3/V1s3m2LDHa8Vh6yIWYYB0iJwsEaS523c4oYA==",
+ "version": "5.0.12",
+ "resolved": "https://registry.npmjs.org/vite/-/vite-5.0.12.tgz",
+ "integrity": "sha512-4hsnEkG3q0N4Tzf1+t6NdN9dg/L3BM+q8SWgbSPnJvrgH2kgdyzfVJwbR1ic69/4uMJJ/3dqDZZE5/WwqW8U1w==",
"dev": true,
"dependencies": {
"esbuild": "^0.19.3",
diff --git a/playground/package.json b/playground/package.json
index 2532afe5af..50246ea9c4 100644
--- a/playground/package.json
+++ b/playground/package.json
@@ -20,13 +20,13 @@
"@types/react": "^18.2.48",
"@types/react-dom": "^18.2.18",
"@types/swagger-ui-react": "^4.18.3",
- "@typescript-eslint/parser": "^6.18.1",
- "@typescript-eslint/eslint-plugin": "^6.18.1",
+ "@typescript-eslint/eslint-plugin": "^6.19.1",
+ "@typescript-eslint/parser": "^6.19.1",
"@vitejs/plugin-react-swc": "^3.5.0",
"eslint": "^8.56.0",
"eslint-plugin-react-hooks": "^4.6.0",
"eslint-plugin-react-refresh": "^0.4.5",
"typescript": "^5.3.3",
- "vite": "^5.0.11"
+ "vite": "^5.0.12"
}
}
diff --git a/request/graphql/schema/collection.go b/request/graphql/schema/collection.go
index 36f4d61c71..ad4f7bb855 100644
--- a/request/graphql/schema/collection.go
+++ b/request/graphql/schema/collection.go
@@ -15,6 +15,8 @@ import (
"fmt"
"sort"
+ "github.com/sourcenetwork/immutable"
+
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/client/request"
"github.com/sourcenetwork/defradb/request/graphql/schema/types"
@@ -151,7 +153,7 @@ func collectionFromAstDefinition(
return client.CollectionDefinition{
Description: client.CollectionDescription{
- Name: def.Name.Value,
+ Name: immutable.Some(def.Name.Value),
Indexes: indexDescriptions,
},
Schema: client.SchemaDescription{
@@ -413,6 +415,7 @@ func astTypeToKind(t ast.Type) (client.FieldKind, error) {
typeDateTime string = "DateTime"
typeString string = "String"
typeBlob string = "Blob"
+ typeJSON string = "JSON"
)
switch astTypeVal := t.(type) {
@@ -463,6 +466,8 @@ func astTypeToKind(t ast.Type) (client.FieldKind, error) {
return client.FieldKind_STRING, nil
case typeBlob:
return client.FieldKind_BLOB, nil
+ case typeJSON:
+ return client.FieldKind_JSON, nil
default:
return client.FieldKind_FOREIGN_OBJECT, nil
}
@@ -513,7 +518,7 @@ func getRelationshipName(
func finalizeRelations(relationManager *RelationManager, definitions []client.CollectionDefinition) error {
embeddedObjNames := map[string]struct{}{}
for _, def := range definitions {
- if def.Description.Name == "" {
+ if !def.Description.Name.HasValue() {
embeddedObjNames[def.Schema.Name] = struct{}{}
}
}
diff --git a/request/graphql/schema/descriptions.go b/request/graphql/schema/descriptions.go
index 147c494c74..01b1d8b0cb 100644
--- a/request/graphql/schema/descriptions.go
+++ b/request/graphql/schema/descriptions.go
@@ -34,6 +34,7 @@ var (
&gql.List{}: client.FieldKind_FOREIGN_OBJECT_ARRAY,
// Custom scalars
schemaTypes.BlobScalarType: client.FieldKind_BLOB,
+ schemaTypes.JSONScalarType: client.FieldKind_JSON,
// More custom ones to come
// - JSON
// - Counters
@@ -55,6 +56,7 @@ var (
client.FieldKind_STRING_ARRAY: gql.NewList(gql.NewNonNull(gql.String)),
client.FieldKind_NILLABLE_STRING_ARRAY: gql.NewList(gql.String),
client.FieldKind_BLOB: schemaTypes.BlobScalarType,
+ client.FieldKind_JSON: schemaTypes.JSONScalarType,
}
// This map is fine to use
@@ -74,6 +76,7 @@ var (
client.FieldKind_STRING_ARRAY: client.LWW_REGISTER,
client.FieldKind_NILLABLE_STRING_ARRAY: client.LWW_REGISTER,
client.FieldKind_BLOB: client.LWW_REGISTER,
+ client.FieldKind_JSON: client.LWW_REGISTER,
client.FieldKind_FOREIGN_OBJECT: client.LWW_REGISTER,
client.FieldKind_FOREIGN_OBJECT_ARRAY: client.NONE_CRDT,
}
diff --git a/request/graphql/schema/descriptions_test.go b/request/graphql/schema/descriptions_test.go
index 397436bca2..39a7b5cce2 100644
--- a/request/graphql/schema/descriptions_test.go
+++ b/request/graphql/schema/descriptions_test.go
@@ -14,6 +14,7 @@ import (
"context"
"testing"
+ "github.com/sourcenetwork/immutable"
"github.com/stretchr/testify/assert"
"github.com/sourcenetwork/defradb/client"
@@ -33,7 +34,7 @@ func TestSingleSimpleType(t *testing.T) {
targetDescs: []client.CollectionDefinition{
{
Description: client.CollectionDescription{
- Name: "User",
+ Name: immutable.Some("User"),
Indexes: []client.IndexDescription{},
},
Schema: client.SchemaDescription{
@@ -82,7 +83,7 @@ func TestSingleSimpleType(t *testing.T) {
targetDescs: []client.CollectionDefinition{
{
Description: client.CollectionDescription{
- Name: "User",
+ Name: immutable.Some("User"),
Indexes: []client.IndexDescription{},
},
Schema: client.SchemaDescription{
@@ -113,7 +114,7 @@ func TestSingleSimpleType(t *testing.T) {
},
{
Description: client.CollectionDescription{
- Name: "Author",
+ Name: immutable.Some("Author"),
Indexes: []client.IndexDescription{},
},
Schema: client.SchemaDescription{
@@ -162,7 +163,7 @@ func TestSingleSimpleType(t *testing.T) {
targetDescs: []client.CollectionDefinition{
{
Description: client.CollectionDescription{
- Name: "Book",
+ Name: immutable.Some("Book"),
Indexes: []client.IndexDescription{},
},
Schema: client.SchemaDescription{
@@ -202,7 +203,7 @@ func TestSingleSimpleType(t *testing.T) {
},
{
Description: client.CollectionDescription{
- Name: "Author",
+ Name: immutable.Some("Author"),
Indexes: []client.IndexDescription{},
},
Schema: client.SchemaDescription{
@@ -260,7 +261,7 @@ func TestSingleSimpleType(t *testing.T) {
targetDescs: []client.CollectionDefinition{
{
Description: client.CollectionDescription{
- Name: "User",
+ Name: immutable.Some("User"),
Indexes: []client.IndexDescription{},
},
Schema: client.SchemaDescription{
@@ -291,7 +292,7 @@ func TestSingleSimpleType(t *testing.T) {
},
{
Description: client.CollectionDescription{
- Name: "Author",
+ Name: immutable.Some("Author"),
Indexes: []client.IndexDescription{},
},
Schema: client.SchemaDescription{
@@ -340,7 +341,7 @@ func TestSingleSimpleType(t *testing.T) {
targetDescs: []client.CollectionDefinition{
{
Description: client.CollectionDescription{
- Name: "Book",
+ Name: immutable.Some("Book"),
Indexes: []client.IndexDescription{},
},
Schema: client.SchemaDescription{
@@ -380,7 +381,7 @@ func TestSingleSimpleType(t *testing.T) {
},
{
Description: client.CollectionDescription{
- Name: "Author",
+ Name: immutable.Some("Author"),
Indexes: []client.IndexDescription{},
},
Schema: client.SchemaDescription{
@@ -438,7 +439,7 @@ func TestSingleSimpleType(t *testing.T) {
targetDescs: []client.CollectionDefinition{
{
Description: client.CollectionDescription{
- Name: "Book",
+ Name: immutable.Some("Book"),
Indexes: []client.IndexDescription{},
},
Schema: client.SchemaDescription{
@@ -478,7 +479,7 @@ func TestSingleSimpleType(t *testing.T) {
},
{
Description: client.CollectionDescription{
- Name: "Author",
+ Name: immutable.Some("Author"),
Indexes: []client.IndexDescription{},
},
Schema: client.SchemaDescription{
@@ -536,7 +537,7 @@ func TestSingleSimpleType(t *testing.T) {
targetDescs: []client.CollectionDefinition{
{
Description: client.CollectionDescription{
- Name: "Book",
+ Name: immutable.Some("Book"),
Indexes: []client.IndexDescription{},
},
Schema: client.SchemaDescription{
@@ -576,7 +577,7 @@ func TestSingleSimpleType(t *testing.T) {
},
{
Description: client.CollectionDescription{
- Name: "Author",
+ Name: immutable.Some("Author"),
Indexes: []client.IndexDescription{},
},
Schema: client.SchemaDescription{
diff --git a/request/graphql/schema/generate.go b/request/graphql/schema/generate.go
index 1083772d58..32fe562cff 100644
--- a/request/graphql/schema/generate.go
+++ b/request/graphql/schema/generate.go
@@ -108,7 +108,7 @@ func (g *Generator) generate(ctx context.Context, collections []client.Collectio
var isEmbedded bool
for _, definition := range collections {
- if t.Name() == definition.Schema.Name && definition.Description.Name == "" {
+ if t.Name() == definition.Schema.Name && !definition.Description.Name.HasValue() {
isEmbedded = true
break
}
@@ -194,8 +194,8 @@ func (g *Generator) generate(ctx context.Context, collections []client.Collectio
var isReadOnly bool
var collectionFound bool
for _, definition := range collections {
- if t.Name() == definition.Description.Name {
- isReadOnly = definition.Description.BaseQuery != nil
+ if t.Name() == definition.Description.Name.Value() {
+ isReadOnly = len(definition.Description.QuerySources()) > 0
collectionFound = true
break
}
@@ -416,15 +416,16 @@ func (g *Generator) buildTypes(
// TODO remove when Go 1.22
collection := c
fieldDescriptions := collection.Schema.Fields
- isEmbeddedObject := collection.Description.Name == ""
- isViewObject := isEmbeddedObject || collection.Description.BaseQuery != nil
+ isEmbeddedObject := !collection.Description.Name.HasValue()
+ isQuerySource := len(collection.Description.QuerySources()) > 0
+ isViewObject := isEmbeddedObject || isQuerySource
var objectName string
if isEmbeddedObject {
// If this is an embedded object, take the type name from the Schema
objectName = collection.Schema.Name
} else {
- objectName = collection.Description.Name
+ objectName = collection.Description.Name.Value()
}
// check if type exists
@@ -529,7 +530,7 @@ func (g *Generator) buildTypes(
// for collection create and update mutation operations.
func (g *Generator) buildMutationInputTypes(collections []client.CollectionDefinition) error {
for _, c := range collections {
- if c.Description.Name == "" {
+ if !c.Description.Name.HasValue() {
// If the definition's collection is empty, this must be a collectionless
// schema, in which case users cannot mutate documents through it and we
// have no need to build mutation input types for it.
@@ -541,7 +542,7 @@ func (g *Generator) buildMutationInputTypes(collections []client.CollectionDefin
// TODO remove when Go 1.22
collection := c
fieldDescriptions := collection.Schema.Fields
- mutationInputName := collection.Description.Name + "MutationInputArg"
+ mutationInputName := collection.Description.Name.Value() + "MutationInputArg"
// check if mutation input type exists
if _, ok := g.manager.schema.TypeMap()[mutationInputName]; ok {
diff --git a/request/graphql/schema/types/scalars.go b/request/graphql/schema/types/scalars.go
index a0e9dca369..6d70330456 100644
--- a/request/graphql/schema/types/scalars.go
+++ b/request/graphql/schema/types/scalars.go
@@ -16,6 +16,7 @@ import (
"github.com/sourcenetwork/graphql-go"
"github.com/sourcenetwork/graphql-go/language/ast"
+ "github.com/valyala/fastjson"
)
// BlobPattern is a regex for validating blob hex strings
@@ -63,3 +64,55 @@ var BlobScalarType = graphql.NewScalar(graphql.ScalarConfig{
}
},
})
+
+// coerceBlob converts the given value into a valid json string.
+// If the value cannot be converted nil is returned.
+func coerceJSON(value any) any {
+ switch value := value.(type) {
+ case []byte:
+ err := fastjson.ValidateBytes(value)
+ if err != nil {
+ // ignore this error because the value
+ // cannot be converted to a json string
+ return nil
+ }
+ return string(value)
+
+ case *[]byte:
+ return coerceJSON(*value)
+
+ case string:
+ err := fastjson.Validate(value)
+ if err != nil {
+ // ignore this error because the value
+ // cannot be converted to a json string
+ return nil
+ }
+ return value
+
+ case *string:
+ return coerceJSON(*value)
+
+ default:
+ return nil
+ }
+}
+
+var JSONScalarType = graphql.NewScalar(graphql.ScalarConfig{
+ Name: "JSON",
+ Description: "The `JSON` scalar type represents a JSON string.",
+ // Serialize converts the value to a json string
+ Serialize: coerceJSON,
+ // ParseValue converts the value to a json string
+ ParseValue: coerceJSON,
+ // ParseLiteral converts the ast value to a json string
+ ParseLiteral: func(valueAST ast.Value) any {
+ switch valueAST := valueAST.(type) {
+ case *ast.StringValue:
+ return coerceJSON(valueAST.Value)
+ default:
+ // return nil if the value cannot be parsed
+ return nil
+ }
+ },
+})
diff --git a/request/graphql/schema/types/scalars_test.go b/request/graphql/schema/types/scalars_test.go
index 5126f2e6a2..6be3fa23fa 100644
--- a/request/graphql/schema/types/scalars_test.go
+++ b/request/graphql/schema/types/scalars_test.go
@@ -86,3 +86,86 @@ func TestBlobScalarTypeParseLiteral(t *testing.T) {
assert.Equal(t, c.expect, result)
}
}
+
+func TestJSONScalarTypeParseAndSerialize(t *testing.T) {
+ validString := `"hello"`
+ validBytes := []byte(`"hello"`)
+
+ boolString := "true"
+ boolBytes := []byte("true")
+
+ intString := "0"
+ intBytes := []byte("0")
+
+ floatString := "3.14"
+ floatBytes := []byte("3.14")
+
+ objectString := `{"name": "Bob"}`
+ objectBytes := []byte(`{"name": "Bob"}`)
+
+ invalidString := "invalid"
+ invalidBytes := []byte("invalid")
+
+ cases := []struct {
+ input any
+ expect any
+ }{
+ {validString, `"hello"`},
+ {&validString, `"hello"`},
+ {validBytes, `"hello"`},
+ {&validBytes, `"hello"`},
+ {boolString, "true"},
+ {&boolString, "true"},
+ {boolBytes, "true"},
+ {&boolBytes, "true"},
+ {[]byte("true"), "true"},
+ {[]byte("false"), "false"},
+ {intString, "0"},
+ {&intString, "0"},
+ {intBytes, "0"},
+ {&intBytes, "0"},
+ {floatString, "3.14"},
+ {&floatString, "3.14"},
+ {floatBytes, "3.14"},
+ {&floatBytes, "3.14"},
+ {invalidString, nil},
+ {&invalidString, nil},
+ {invalidBytes, nil},
+ {&invalidBytes, nil},
+ {objectString, `{"name": "Bob"}`},
+ {&objectString, `{"name": "Bob"}`},
+ {objectBytes, `{"name": "Bob"}`},
+ {&objectBytes, `{"name": "Bob"}`},
+ {nil, nil},
+ {0, nil},
+ {false, nil},
+ }
+ for _, c := range cases {
+ parsed := JSONScalarType.ParseValue(c.input)
+ assert.Equal(t, c.expect, parsed)
+
+ serialized := JSONScalarType.Serialize(c.input)
+ assert.Equal(t, c.expect, serialized)
+ }
+}
+
+func TestJSONScalarTypeParseLiteral(t *testing.T) {
+ cases := []struct {
+ input ast.Value
+ expect any
+ }{
+ {&ast.StringValue{Value: "0"}, "0"},
+ {&ast.StringValue{Value: "invalid"}, nil},
+ {&ast.IntValue{}, nil},
+ {&ast.BooleanValue{}, nil},
+ {&ast.NullValue{}, nil},
+ {&ast.EnumValue{}, nil},
+ {&ast.FloatValue{}, nil},
+ {&ast.ListValue{}, nil},
+ {&ast.ObjectValue{}, nil},
+ }
+ for _, c := range cases {
+ result := JSONScalarType.ParseLiteral(c.input)
+ assert.Equal(t, c.expect, result)
+ }
+}
diff --git a/tests/clients/cli/wrapper_collection.go b/tests/clients/cli/wrapper_collection.go
index 8295bad8d7..4d94d7b20f 100644
--- a/tests/clients/cli/wrapper_collection.go
+++ b/tests/clients/cli/wrapper_collection.go
@@ -16,6 +16,8 @@ import (
"fmt"
"strings"
+ "github.com/sourcenetwork/immutable"
+
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/client/request"
"github.com/sourcenetwork/defradb/datastore"
@@ -34,7 +36,7 @@ func (c *Collection) Description() client.CollectionDescription {
return c.def.Description
}
-func (c *Collection) Name() string {
+func (c *Collection) Name() immutable.Option[string] {
return c.Description().Name
}
@@ -55,8 +57,12 @@ func (c *Collection) Definition() client.CollectionDefinition {
}
func (c *Collection) Create(ctx context.Context, doc *client.Document) error {
+ if !c.Description().Name.HasValue() {
+ return client.ErrOperationNotPermittedOnNamelessCols
+ }
+
args := []string{"client", "collection", "create"}
- args = append(args, "--name", c.Description().Name)
+ args = append(args, "--name", c.Description().Name.Value())
document, err := doc.String()
if err != nil {
@@ -73,8 +79,12 @@ func (c *Collection) Create(ctx context.Context, doc *client.Document) error {
}
func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) error {
+ if !c.Description().Name.HasValue() {
+ return client.ErrOperationNotPermittedOnNamelessCols
+ }
+
args := []string{"client", "collection", "create"}
- args = append(args, "--name", c.Description().Name)
+ args = append(args, "--name", c.Description().Name.Value())
docMapList := make([]map[string]any, len(docs))
for i, doc := range docs {
@@ -101,8 +111,12 @@ func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) er
}
func (c *Collection) Update(ctx context.Context, doc *client.Document) error {
+ if !c.Description().Name.HasValue() {
+ return client.ErrOperationNotPermittedOnNamelessCols
+ }
+
args := []string{"client", "collection", "update"}
- args = append(args, "--name", c.Description().Name)
+ args = append(args, "--name", c.Description().Name.Value())
args = append(args, "--docID", doc.ID().String())
document, err := doc.ToJSONPatch()
@@ -179,8 +193,12 @@ func (c *Collection) UpdateWithFilter(
filter any,
updater string,
) (*client.UpdateResult, error) {
+ if !c.Description().Name.HasValue() {
+ return nil, client.ErrOperationNotPermittedOnNamelessCols
+ }
+
args := []string{"client", "collection", "update"}
- args = append(args, "--name", c.Description().Name)
+ args = append(args, "--name", c.Description().Name.Value())
args = append(args, "--updater", updater)
filterJSON, err := json.Marshal(filter)
@@ -197,8 +215,12 @@ func (c *Collection) UpdateWithDocID(
docID client.DocID,
updater string,
) (*client.UpdateResult, error) {
+ if !c.Description().Name.HasValue() {
+ return nil, client.ErrOperationNotPermittedOnNamelessCols
+ }
+
args := []string{"client", "collection", "update"}
- args = append(args, "--name", c.Description().Name)
+ args = append(args, "--name", c.Description().Name.Value())
args = append(args, "--docID", docID.String())
args = append(args, "--updater", updater)
@@ -210,8 +232,12 @@ func (c *Collection) UpdateWithDocIDs(
docIDs []client.DocID,
updater string,
) (*client.UpdateResult, error) {
+ if !c.Description().Name.HasValue() {
+ return nil, client.ErrOperationNotPermittedOnNamelessCols
+ }
+
args := []string{"client", "collection", "update"}
- args = append(args, "--name", c.Description().Name)
+ args = append(args, "--name", c.Description().Name.Value())
args = append(args, "--updater", updater)
strDocIDs := make([]string, len(docIDs))
@@ -252,8 +278,12 @@ func (c *Collection) deleteWith(
}
func (c *Collection) DeleteWithFilter(ctx context.Context, filter any) (*client.DeleteResult, error) {
+ if !c.Description().Name.HasValue() {
+ return nil, client.ErrOperationNotPermittedOnNamelessCols
+ }
+
args := []string{"client", "collection", "delete"}
- args = append(args, "--name", c.Description().Name)
+ args = append(args, "--name", c.Description().Name.Value())
filterJSON, err := json.Marshal(filter)
if err != nil {
@@ -265,16 +295,24 @@ func (c *Collection) DeleteWithFilter(ctx context.Context, filter any) (*client.
}
func (c *Collection) DeleteWithDocID(ctx context.Context, docID client.DocID) (*client.DeleteResult, error) {
+ if !c.Description().Name.HasValue() {
+ return nil, client.ErrOperationNotPermittedOnNamelessCols
+ }
+
args := []string{"client", "collection", "delete"}
- args = append(args, "--name", c.Description().Name)
+ args = append(args, "--name", c.Description().Name.Value())
args = append(args, "--docID", docID.String())
return c.deleteWith(ctx, args)
}
func (c *Collection) DeleteWithDocIDs(ctx context.Context, docIDs []client.DocID) (*client.DeleteResult, error) {
+ if !c.Description().Name.HasValue() {
+ return nil, client.ErrOperationNotPermittedOnNamelessCols
+ }
+
args := []string{"client", "collection", "delete"}
- args = append(args, "--name", c.Description().Name)
+ args = append(args, "--name", c.Description().Name.Value())
strDocIDs := make([]string, len(docIDs))
for i, v := range docIDs {
@@ -286,8 +324,12 @@ func (c *Collection) DeleteWithDocIDs(ctx context.Context, docIDs []client.DocID
}
func (c *Collection) Get(ctx context.Context, docID client.DocID, showDeleted bool) (*client.Document, error) {
+ if !c.Description().Name.HasValue() {
+ return nil, client.ErrOperationNotPermittedOnNamelessCols
+ }
+
args := []string{"client", "collection", "get"}
- args = append(args, "--name", c.Description().Name)
+ args = append(args, "--name", c.Description().Name.Value())
args = append(args, docID.String())
if showDeleted {
@@ -315,8 +357,12 @@ func (c *Collection) WithTxn(tx datastore.Txn) client.Collection {
}
func (c *Collection) GetAllDocIDs(ctx context.Context) (<-chan client.DocIDResult, error) {
+ if !c.Description().Name.HasValue() {
+ return nil, client.ErrOperationNotPermittedOnNamelessCols
+ }
+
args := []string{"client", "collection", "docIDs"}
- args = append(args, "--name", c.Description().Name)
+ args = append(args, "--name", c.Description().Name.Value())
stdOut, _, err := c.cmd.executeStream(ctx, args)
if err != nil {
@@ -354,8 +400,12 @@ func (c *Collection) CreateIndex(
ctx context.Context,
indexDesc client.IndexDescription,
) (index client.IndexDescription, err error) {
+ if !c.Description().Name.HasValue() {
+ return client.IndexDescription{}, client.ErrOperationNotPermittedOnNamelessCols
+ }
+
args := []string{"client", "index", "create"}
- args = append(args, "--collection", c.Description().Name)
+ args = append(args, "--collection", c.Description().Name.Value())
if indexDesc.Name != "" {
args = append(args, "--name", indexDesc.Name)
}
@@ -380,8 +430,12 @@ func (c *Collection) CreateIndex(
}
func (c *Collection) DropIndex(ctx context.Context, indexName string) error {
+ if !c.Description().Name.HasValue() {
+ return client.ErrOperationNotPermittedOnNamelessCols
+ }
+
args := []string{"client", "index", "drop"}
- args = append(args, "--collection", c.Description().Name)
+ args = append(args, "--collection", c.Description().Name.Value())
args = append(args, "--name", indexName)
_, err := c.cmd.execute(ctx, args)
@@ -389,8 +443,12 @@ func (c *Collection) DropIndex(ctx context.Context, indexName string) error {
}
func (c *Collection) GetIndexes(ctx context.Context) ([]client.IndexDescription, error) {
+ if !c.Description().Name.HasValue() {
+ return nil, client.ErrOperationNotPermittedOnNamelessCols
+ }
+
args := []string{"client", "index", "list"}
- args = append(args, "--collection", c.Description().Name)
+ args = append(args, "--collection", c.Description().Name.Value())
data, err := c.cmd.execute(ctx, args)
if err != nil {
diff --git a/tests/gen/cli/gendocs.go b/tests/gen/cli/gendocs.go
index 6d388eaf67..226d73bc97 100644
--- a/tests/gen/cli/gendocs.go
+++ b/tests/gen/cli/gendocs.go
@@ -123,7 +123,7 @@ func saveBatchToCollections(
) error {
for colName, colDocs := range colDocsMap {
for _, col := range collections {
- if col.Description().Name == colName {
+ if col.Description().Name.Value() == colName {
err := col.CreateMany(context.Background(), colDocs)
if err != nil {
return err
@@ -138,7 +138,7 @@ func saveBatchToCollections(
func groupDocsByCollection(docs []gen.GeneratedDoc) map[string][]*client.Document {
result := make(map[string][]*client.Document)
for _, doc := range docs {
- result[doc.Col.Description.Name] = append(result[doc.Col.Description.Name], doc.Doc)
+ result[doc.Col.Description.Name.Value()] = append(result[doc.Col.Description.Name.Value()], doc.Doc)
}
return result
}
diff --git a/tests/gen/gen_auto.go b/tests/gen/gen_auto.go
index 7ad3bb2d41..2dd3f78d8a 100644
--- a/tests/gen/gen_auto.go
+++ b/tests/gen/gen_auto.go
@@ -54,7 +54,7 @@ func AutoGenerate(definitions []client.CollectionDefinition, options ...Option)
}
typeDefs := make(map[string]client.CollectionDefinition)
for _, def := range definitions {
- typeDefs[def.Description.Name] = def
+ typeDefs[def.Description.Name.Value()] = def
}
generator := newRandomDocGenerator(typeDefs, nil)
return generator.generateDocs(options...)
@@ -212,13 +212,13 @@ func validateDefinitions(definitions []client.CollectionDefinition) error {
colNames := make(map[string]struct{})
fieldRefs := []string{}
for _, def := range definitions {
- if def.Description.Name == "" {
+ if def.Description.Name.Value() == "" {
return NewErrIncompleteColDefinition("description name is empty")
}
if def.Schema.Name == "" {
return NewErrIncompleteColDefinition("schema name is empty")
}
- if def.Description.Name != def.Schema.Name {
+ if def.Description.Name.Value() != def.Schema.Name {
return NewErrIncompleteColDefinition("description name and schema name do not match")
}
for _, field := range def.Schema.Fields {
@@ -232,7 +232,7 @@ func validateDefinitions(definitions []client.CollectionDefinition) error {
fieldRefs = append(fieldRefs, field.Schema)
}
}
- colNames[def.Description.Name] = struct{}{}
+ colNames[def.Description.Name.Value()] = struct{}{}
colIDs[def.Description.ID] = struct{}{}
}
for _, ref := range fieldRefs {
diff --git a/tests/gen/gen_auto_config.go b/tests/gen/gen_auto_config.go
index ccebce92d1..1457e67a12 100644
--- a/tests/gen/gen_auto_config.go
+++ b/tests/gen/gen_auto_config.go
@@ -54,7 +54,7 @@ func (m configsMap) AddForField(typeStr, fieldName string, conf genConfig) {
func validateConfig(types map[string]client.CollectionDefinition, configsMap configsMap) error {
for typeName, typeConfigs := range configsMap {
typeDef := types[typeName]
- if typeDef.Description.Name == "" {
+ if typeDef.Description.Name.Value() == "" {
return newNotDefinedTypeErr(typeName)
}
for fieldName, fieldConfig := range typeConfigs {
diff --git a/tests/gen/gen_auto_test.go b/tests/gen/gen_auto_test.go
index f22859df0c..25191bbe41 100644
--- a/tests/gen/gen_auto_test.go
+++ b/tests/gen/gen_auto_test.go
@@ -15,6 +15,7 @@ import (
"testing"
"time"
+ "github.com/sourcenetwork/immutable"
"github.com/stretchr/testify/assert"
"github.com/sourcenetwork/defradb/client"
@@ -71,7 +72,7 @@ func getDocIDsFromDocs(docs []*client.Document) []string {
func filterByCollection(docs []GeneratedDoc, name string) []*client.Document {
var result []*client.Document
for _, doc := range docs {
- if doc.Col.Description.Name == name {
+ if doc.Col.Description.Name.Value() == name {
result = append(result, doc.Doc)
}
}
@@ -1200,7 +1201,7 @@ func TestAutoGenerate_IfCollectionDefinitionIsIncomplete_ReturnError(t *testing.
return []client.CollectionDefinition{
{
Description: client.CollectionDescription{
- Name: "User",
+ Name: immutable.Some("User"),
ID: 0,
},
Schema: client.SchemaDescription{
@@ -1221,7 +1222,7 @@ func TestAutoGenerate_IfCollectionDefinitionIsIncomplete_ReturnError(t *testing.
},
{
Description: client.CollectionDescription{
- Name: "Device",
+ Name: immutable.Some("Device"),
ID: 1,
},
Schema: client.SchemaDescription{
@@ -1252,7 +1253,13 @@ func TestAutoGenerate_IfCollectionDefinitionIsIncomplete_ReturnError(t *testing.
{
name: "description name is empty",
changeDefs: func(defs []client.CollectionDefinition) {
- defs[0].Description.Name = ""
+ defs[0].Description.Name = immutable.Some("")
+ },
+ },
+ {
+ name: "description name is none",
+ changeDefs: func(defs []client.CollectionDefinition) {
+ defs[0].Description.Name = immutable.None[string]()
},
},
{
@@ -1312,7 +1319,7 @@ func TestAutoGenerate_IfColDefinitionsAreValid_ShouldGenerate(t *testing.T) {
defs := []client.CollectionDefinition{
{
Description: client.CollectionDescription{
- Name: "User",
+ Name: immutable.Some("User"),
ID: 0,
},
Schema: client.SchemaDescription{
@@ -1341,7 +1348,7 @@ func TestAutoGenerate_IfColDefinitionsAreValid_ShouldGenerate(t *testing.T) {
},
{
Description: client.CollectionDescription{
- Name: "Device",
+ Name: immutable.Some("Device"),
ID: 1,
},
Schema: client.SchemaDescription{
diff --git a/tests/gen/schema_parser.go b/tests/gen/schema_parser.go
index 216376c26d..bcce388f22 100644
--- a/tests/gen/schema_parser.go
+++ b/tests/gen/schema_parser.go
@@ -31,7 +31,7 @@ func parseSDL(gqlSDL string) (map[string]client.CollectionDefinition, error) {
}
result := make(map[string]client.CollectionDefinition)
for _, col := range cols {
- result[col.Description.Name] = col
+ result[col.Description.Name.Value()] = col
}
return result, nil
}
diff --git a/tests/integration/explain_result_asserter.go b/tests/integration/explain_result_asserter.go
index 30126d4fe4..45f998e481 100644
--- a/tests/integration/explain_result_asserter.go
+++ b/tests/integration/explain_result_asserter.go
@@ -59,15 +59,15 @@ func (a *ExplainResultAsserter) Assert(t *testing.T, result []dataMap) {
require.Len(t, result, 1, "Expected len(result) = 1, got %d", len(result))
explainNode, ok := result[0]["explain"].(dataMap)
require.True(t, ok, "Expected explain none")
- assert.Equal(t, explainNode["executionSuccess"], true, "Expected executionSuccess property")
+ assert.Equal(t, true, explainNode["executionSuccess"], "Expected executionSuccess property")
if a.sizeOfResults.HasValue() {
actual := explainNode["sizeOfResult"]
- assert.Equal(t, actual, a.sizeOfResults.Value(),
+ assert.Equal(t, a.sizeOfResults.Value(), actual,
"Expected %d sizeOfResult, got %d", a.sizeOfResults.Value(), actual)
}
if a.planExecutions.HasValue() {
actual := explainNode["planExecutions"]
- assert.Equal(t, actual, a.planExecutions.Value(),
+ assert.Equal(t, a.planExecutions.Value(), actual,
"Expected %d planExecutions, got %d", a.planExecutions.Value(), actual)
}
selectTopNode, ok := explainNode["selectTopNode"].(dataMap)
@@ -78,7 +78,7 @@ func (a *ExplainResultAsserter) Assert(t *testing.T, result []dataMap) {
if a.filterMatches.HasValue() {
filterMatches, hasFilterMatches := selectNode["filterMatches"]
require.True(t, hasFilterMatches, "Expected filterMatches property")
- assert.Equal(t, filterMatches, uint64(a.filterMatches.Value()),
+ assert.Equal(t, uint64(a.filterMatches.Value()), filterMatches,
"Expected %d filterMatches, got %d", a.filterMatches, filterMatches)
}
@@ -102,22 +102,22 @@ func (a *ExplainResultAsserter) Assert(t *testing.T, result []dataMap) {
if a.iterations.HasValue() {
actual := getScanNodesProp(iterationsProp)
- assert.Equal(t, actual, uint64(a.iterations.Value()),
+ assert.Equal(t, uint64(a.iterations.Value()), actual,
"Expected %d iterations, got %d", a.iterations.Value(), actual)
}
if a.docFetches.HasValue() {
actual := getScanNodesProp(docFetchesProp)
- assert.Equal(t, actual, uint64(a.docFetches.Value()),
+ assert.Equal(t, uint64(a.docFetches.Value()), actual,
"Expected %d docFetches, got %d", a.docFetches.Value(), actual)
}
if a.fieldFetches.HasValue() {
actual := getScanNodesProp(fieldFetchesProp)
- assert.Equal(t, actual, uint64(a.fieldFetches.Value()),
+ assert.Equal(t, uint64(a.fieldFetches.Value()), actual,
"Expected %d fieldFetches, got %d", a.fieldFetches.Value(), actual)
}
if a.indexFetches.HasValue() {
actual := getScanNodesProp(indexFetchesProp)
- assert.Equal(t, actual, uint64(a.indexFetches.Value()),
+ assert.Equal(t, uint64(a.indexFetches.Value()), actual,
"Expected %d indexFetches, got %d", a.indexFetches.Value(), actual)
}
}
diff --git a/tests/integration/index/create_composite_test.go b/tests/integration/index/create_composite_test.go
new file mode 100644
index 0000000000..c20b1b1240
--- /dev/null
+++ b/tests/integration/index/create_composite_test.go
@@ -0,0 +1,76 @@
+// Copyright 2023 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package index
+
+import (
+ "testing"
+
+ "github.com/sourcenetwork/defradb/client"
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestCompositeIndexCreate_WhenCreated_CanRetrieve(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "create composite index and retrieve it",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User {
+ name: String
+ age: Int
+ }
+ `,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "John",
+ "age": 21
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "Shahzad",
+ "age": 22
+ }`,
+ },
+ testUtils.CreateIndex{
+ CollectionID: 0,
+ IndexName: "name_age_index",
+ FieldsNames: []string{"name", "age"},
+ },
+ testUtils.GetIndexes{
+ CollectionID: 0,
+ ExpectedIndexes: []client.IndexDescription{
+ {
+ Name: "name_age_index",
+ ID: 1,
+ Fields: []client.IndexedFieldDescription{
+ {
+ Name: "name",
+ Direction: client.Ascending,
+ },
+ {
+ Name: "age",
+ Direction: client.Ascending,
+ },
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/index/create_unique_composite_test.go b/tests/integration/index/create_unique_composite_test.go
new file mode 100644
index 0000000000..3d146eb591
--- /dev/null
+++ b/tests/integration/index/create_unique_composite_test.go
@@ -0,0 +1,182 @@
+// Copyright 2023 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package index
+
+import (
+ "testing"
+
+ "github.com/sourcenetwork/defradb/client"
+ "github.com/sourcenetwork/defradb/db"
+ "github.com/sourcenetwork/defradb/errors"
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestCreateUniqueCompositeIndex_IfFieldValuesAreNotUnique_ReturnError(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "If combination of fields is not unique, creating of unique index fails",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User {
+ name: String
+ age: Int
+ email: String
+ }
+ `,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "John",
+ "age": 21,
+ "email": "email@gmail.com"
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "John",
+ "age": 21,
+ "email": "another@gmail.com"
+ }`,
+ },
+ testUtils.CreateIndex{
+ CollectionID: 0,
+ FieldsNames: []string{"name", "age"},
+ Unique: true,
+ ExpectedError: db.NewErrCanNotIndexNonUniqueFields(
+ "bae-cae3deac-d371-5a1f-93b4-ede69042f79b",
+ errors.NewKV("name", "John"), errors.NewKV("age", 21),
+ ).Error(),
+ },
+ testUtils.GetIndexes{
+ CollectionID: 0,
+ ExpectedIndexes: []client.IndexDescription{},
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestUniqueCompositeIndexCreate_UponAddingDocWithExistingFieldValue_ReturnError(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "adding a new doc with existing field combination for composite index should fail",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(unique: true, fields: ["name", "age"]) {
+ name: String
+ age: Int
+ email: String
+ }
+ `,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "John",
+ "age": 21,
+ "email": "email@gmail.com"
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "John",
+ "age": 21,
+ "email": "another@gmail.com"
+ }`,
+ ExpectedError: db.NewErrCanNotIndexNonUniqueFields(
+ "bae-13254430-7e9e-52e2-9861-9a7ec7a75c8d",
+ errors.NewKV("name", "John"), errors.NewKV("age", 21)).Error(),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestUniqueCompositeIndexCreate_IfFieldValuesAreUnique_Succeed(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "create unique composite index if all docs have unique fields combinations",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User {
+ name: String
+ age: Int
+ email: String
+ }
+ `,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "John",
+ "age": 21,
+ "email": "some@gmail.com"
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "John",
+ "age": 35,
+ "email": "another@gmail.com"
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "Andy",
+ "age": 35,
+ "email": "different@gmail.com"
+ }`,
+ },
+ testUtils.CreateIndex{
+ CollectionID: 0,
+ FieldsNames: []string{"name", "age"},
+ IndexName: "name_age_unique_index",
+ Unique: true,
+ },
+ testUtils.GetIndexes{
+ CollectionID: 0,
+ ExpectedIndexes: []client.IndexDescription{
+ {
+ Name: "name_age_unique_index",
+ ID: 1,
+ Unique: true,
+ Fields: []client.IndexedFieldDescription{
+ {
+ Name: "name",
+ Direction: client.Ascending,
+ },
+ {
+ Name: "age",
+ Direction: client.Ascending,
+ },
+ },
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/index/create_unique_test.go b/tests/integration/index/create_unique_test.go
index fac2330a28..303e91589a 100644
--- a/tests/integration/index/create_unique_test.go
+++ b/tests/integration/index/create_unique_test.go
@@ -15,6 +15,7 @@ import (
"github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/db"
+ "github.com/sourcenetwork/defradb/errors"
testUtils "github.com/sourcenetwork/defradb/tests/integration"
)
@@ -57,10 +58,11 @@ func TestCreateUniqueIndex_IfFieldValuesAreNotUnique_ReturnError(t *testing.T) {
}`,
},
testUtils.CreateIndex{
- CollectionID: 0,
- FieldName: "age",
- Unique: true,
- ExpectedError: db.NewErrCanNotIndexNonUniqueField(johnDocID, "age", 21).Error(),
+ CollectionID: 0,
+ FieldName: "age",
+ Unique: true,
+ ExpectedError: db.NewErrCanNotIndexNonUniqueFields(
+ johnDocID, errors.NewKV("age", 21)).Error(),
},
testUtils.GetIndexes{
CollectionID: 0,
@@ -99,7 +101,8 @@ func TestUniqueIndexCreate_UponAddingDocWithExistingFieldValue_ReturnError(t *te
"name": "John",
"age": 21
}`,
- ExpectedError: db.NewErrCanNotIndexNonUniqueField(johnDocID, "age", 21).Error(),
+ ExpectedError: db.NewErrCanNotIndexNonUniqueFields(
+ johnDocID, errors.NewKV("age", 21)).Error(),
},
testUtils.Request{
Request: `query {
@@ -222,10 +225,11 @@ func TestUniqueIndexCreate_IfNilFieldsArePresent_ReturnError(t *testing.T) {
}`,
},
testUtils.CreateIndex{
- CollectionID: 0,
- FieldName: "age",
- Unique: true,
- ExpectedError: db.NewErrCanNotIndexNonUniqueField("bae-caba9876-89aa-5bcf-bc1c-387a52499b27", "age", nil).Error(),
+ CollectionID: 0,
+ FieldName: "age",
+ Unique: true,
+ ExpectedError: db.NewErrCanNotIndexNonUniqueFields(
+ "bae-caba9876-89aa-5bcf-bc1c-387a52499b27", errors.NewKV("age", nil)).Error(),
},
},
}
@@ -291,7 +295,8 @@ func TestUniqueIndexCreate_UponAddingDocWithExistingNilValue_ReturnError(t *test
{
"name": "Andy"
}`,
- ExpectedError: db.NewErrCanNotIndexNonUniqueField("bae-2159860f-3cd1-59de-9440-71331e77cbb8", "age", nil).Error(),
+ ExpectedError: db.NewErrCanNotIndexNonUniqueFields(
+ "bae-2159860f-3cd1-59de-9440-71331e77cbb8", errors.NewKV("age", nil)).Error(),
},
},
}
diff --git a/tests/integration/index/query_with_composite_index_only_filter_test.go b/tests/integration/index/query_with_composite_index_only_filter_test.go
new file mode 100644
index 0000000000..ce09ec1f89
--- /dev/null
+++ b/tests/integration/index/query_with_composite_index_only_filter_test.go
@@ -0,0 +1,828 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package index
+
+import (
+ "testing"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestQueryWithCompositeIndex_WithEqualFilter_ShouldFetch(t *testing.T) {
+ req1 := `query {
+ User(filter: {name: {_eq: "Islam"}}) {
+ name
+ age
+ }
+ }`
+ req2 := `query {
+ User(filter: {name: {_eq: "Islam"}, age: {_eq: 32}}) {
+ name
+ age
+ }
+ }`
+ req3 := `query {
+ User(filter: {name: {_eq: "Islam"}, age: {_eq: 66}}) {
+ name
+ age
+ }
+ }`
+ test := testUtils.TestCase{
+ Description: "Test filtering on composite index with _eq filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(fields: ["name", "age"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req1,
+ Results: []map[string]any{
+ {"name": "Islam", "age": 32},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req1),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(1).WithIndexFetches(1),
+ },
+ testUtils.Request{
+ Request: req2,
+ Results: []map[string]any{
+ {"name": "Islam", "age": 32},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req2),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(1).WithIndexFetches(1),
+ },
+ testUtils.Request{
+ Request: req3,
+ Results: []map[string]any{},
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithCompositeIndex_WithGreaterThanFilterOnFirstField_ShouldFetch(t *testing.T) {
+ req := `query {
+ User(filter: {name: {_ne: "Keenan"}, age: {_gt: 44}}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _gt filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(fields: ["age", "name"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req,
+ Results: []map[string]any{
+ {"name": "Chris"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(1).WithIndexFetches(10),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithCompositeIndex_WithGreaterThanFilterOnSecondField_ShouldFetch(t *testing.T) {
+ req := `query {
+ User(filter: {name: {_ne: "Keenan"}, age: {_gt: 44}}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+		Description: "Test composite index filtering with _gt filter on second field",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(fields: ["name", "age"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req,
+ Results: []map[string]any{
+ {"name": "Chris"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(1).WithIndexFetches(10),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithCompositeIndex_WithGreaterOrEqualFilterOnFirstField_ShouldFetch(t *testing.T) {
+ req := `query {
+ User(filter: {name: {_ne: "Keenan"}, age: {_ge: 44},}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _ge filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(fields: ["age", "name"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req,
+ Results: []map[string]any{
+ {"name": "Roy"},
+ {"name": "Chris"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(10),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithCompositeIndex_WithGreaterOrEqualFilterOnSecondField_ShouldFetch(t *testing.T) {
+ req := `query {
+ User(filter: {age: {_ge: 44}, name: {_ne: "Keenan"}}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+		Description: "Test composite index filtering with _ge filter on second field",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(fields: ["name", "age"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req,
+ Results: []map[string]any{
+ {"name": "Roy"},
+ {"name": "Chris"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(10),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithCompositeIndex_WithLessThanFilterOnFirstField_ShouldFetch(t *testing.T) {
+ req := `query {
+ User(filter: {age: {_lt: 24}, name: {_ne: "Shahzad"}}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _lt filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(fields: ["age", "name"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req,
+ Results: []map[string]any{
+ {"name": "Bruno"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(1).WithIndexFetches(10),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithCompositeIndex_WithLessThanFilterOnSecondField_ShouldFetch(t *testing.T) {
+ req := `query {
+ User(filter: {age: {_lt: 24}, name: {_ne: "Shahzad"}}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+		Description: "Test composite index filtering with _lt filter on second field",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(fields: ["name", "age"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req,
+ Results: []map[string]any{
+ {"name": "Bruno"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(1).WithIndexFetches(10),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithCompositeIndex_WithLessOrEqualFilterOnFirstField_ShouldFetch(t *testing.T) {
+ req := `query {
+ User(filter: {age: {_le: 28}, name: {_ne: "Bruno"}}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _le filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(fields: ["age", "name"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req,
+ Results: []map[string]any{
+ {"name": "Shahzad"},
+ {"name": "Fred"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(10),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithCompositeIndex_WithLessOrEqualFilterOnSecondField_ShouldFetch(t *testing.T) {
+ req := `query {
+ User(filter: {age: {_le: 28}, name: {_ne: "Bruno"}}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+		Description: "Test composite index filtering with _le filter on second field",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(fields: ["name", "age"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req,
+ Results: []map[string]any{
+ {"name": "Fred"},
+ {"name": "Shahzad"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(10),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithCompositeIndex_WithNotEqualFilter_ShouldFetch(t *testing.T) {
+ req := `query {
+ User(filter: {name: {_ne: "Islam"}, age: {_ne: 28}}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _ne filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(fields: ["name", "age"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req,
+ Results: []map[string]any{
+ {"name": "Roy"},
+ {"name": "Addo"},
+ {"name": "Andy"},
+ {"name": "John"},
+ {"name": "Bruno"},
+ {"name": "Chris"},
+ {"name": "Keenan"},
+ {"name": "Shahzad"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(8).WithIndexFetches(10),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithCompositeIndex_WithInFilter_ShouldFetch(t *testing.T) {
+ req := `query {
+ User(filter: {age: {_in: [20, 28, 33]}, name: {_in: ["Addo", "Andy", "Fred"]}}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _in filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(fields: ["name", "age"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req,
+ Results: []map[string]any{
+ {"name": "Andy"},
+ {"name": "Fred"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(3),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithCompositeIndex_WithNotInFilter_ShouldFetch(t *testing.T) {
+ req := `query {
+ User(filter: {age: {_nin: [20, 23, 28, 42]}, name: {_nin: ["John", "Andy", "Chris"]}}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _nin filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(fields: ["name", "age"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req,
+ Results: []map[string]any{
+ {"name": "Roy"},
+ {"name": "Islam"},
+ {"name": "Keenan"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(3).WithIndexFetches(10),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithCompositeIndex_WithLikeFilter_ShouldFetch(t *testing.T) {
+ req1 := `query {
+ User(filter: {email: {_like: "a%"}, name: {_like: "%o"}}) {
+ name
+ }
+ }`
+ req2 := `query {
+ User(filter: {email: {_like: "%d@gmail.com"}, name: {_like: "F%"}}) {
+ name
+ }
+ }`
+ req3 := `query {
+ User(filter: {email: {_like: "%e%"}, name: {_like: "%n%"}}) {
+ name
+ }
+ }`
+ req4 := `query {
+ User(filter: {email: {_like: "fred@gmail.com"}, name: {_like: "Fred"}}) {
+ name
+ }
+ }`
+ req5 := `query {
+ User(filter: {email: {_like: "a%@gmail.com"}, name: {_like: "%dd%"}}) {
+ name
+ }
+ }`
+ req6 := `query {
+ User(filter: {email: {_like: "a%com%m"}}) {
+ name
+ }
+ }`
+ req7 := `query {
+ User(filter: {email: {_like: "s%"}, name: {_like: "s%h%d"}}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _like filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(fields: ["name", "email"]) {
+ name: String
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req1,
+ Results: []map[string]any{
+ {"name": "Addo"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req1),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(1).WithIndexFetches(10),
+ },
+ testUtils.Request{
+ Request: req2,
+ Results: []map[string]any{
+ {"name": "Fred"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req2),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(1).WithIndexFetches(10),
+ },
+ testUtils.Request{
+ Request: req3,
+ Results: []map[string]any{
+ {"name": "Keenan"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req3),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(1).WithIndexFetches(10),
+ },
+ testUtils.Request{
+ Request: req4,
+ Results: []map[string]any{
+ {"name": "Fred"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req4),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(1).WithIndexFetches(10),
+ },
+ testUtils.Request{
+ Request: req5,
+ Results: []map[string]any{
+ {"name": "Addo"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req5),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(1).WithIndexFetches(10),
+ },
+ testUtils.Request{
+ Request: req6,
+ Results: []map[string]any{},
+ },
+ testUtils.Request{
+ Request: req7,
+ Results: []map[string]any{},
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithCompositeIndex_WithNotLikeFilter_ShouldFetch(t *testing.T) {
+ req := `query {
+ User(filter: {name: {_nlike: "%h%"}, email: {_nlike: "%d%"}}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _nlike filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(fields: ["name", "email"]) {
+ name: String
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req,
+ Results: []map[string]any{
+ {"name": "Roy"},
+ {"name": "Bruno"},
+ {"name": "Islam"},
+ {"name": "Keenan"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(4).WithIndexFetches(10),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithCompositeIndex_IfFirstFieldIsNotInFilter_ShouldNotUseIndex(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Test if index is not used when first field is not in filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(fields: ["name", "age"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: `query @explain(type: execute) {
+ User(filter: {age: {_eq: 32}}) {
+ name
+ }
+ }`,
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(11).WithIndexFetches(0),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithCompositeIndex_WithEqualFilterOnNilValueOnFirst_ShouldFetch(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _eq filter on nil value on first field",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(fields: ["name", "age"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "Alice",
+ "age": 22
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "age": 32
+ }`,
+ },
+ testUtils.Request{
+ Request: `
+ query {
+ User(filter: {name: {_eq: null}}) {
+ name
+ age
+ }
+ }`,
+ Results: []map[string]any{
+ {"name": nil, "age": 32},
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithCompositeIndex_WithEqualFilterOnNilValueOnSecond_ShouldFetch(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _eq filter on nil value on second field",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(fields: ["name", "age"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "Alice",
+ "age": 22
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "Bob"
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "Alice"
+ }`,
+ },
+ testUtils.Request{
+ Request: `
+ query {
+ User(filter: {name: {_eq: "Alice"}, age: {_eq: null}}) {
+ name
+ age
+ }
+ }`,
+ Results: []map[string]any{
+ {
+ "name": "Alice",
+ "age": nil,
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithCompositeIndex_IfMiddleFieldIsNotInFilter_ShouldIgnoreValue(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Test composite index with filter without middle field",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(fields: ["name", "email", "age"]) {
+ name: String
+ email: String
+ age: Int
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "Alice",
+ "email": "alice@gmail.com",
+ "age": 22
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "Alan",
+ "email": "alan@gmail.com",
+ "age": 38
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "Bob",
+ "email": "bob@gmail.com",
+ "age": 51
+ }`,
+ },
+ testUtils.Request{
+ Request: `
+ query {
+ User(filter: {name: {_like: "%l%"}, age: {_gt: 30}}) {
+ name
+ }
+ }`,
+ Results: []map[string]any{
+ {
+ "name": "Alan",
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/index/query_with_index_combined_filter_test.go b/tests/integration/index/query_with_index_combined_filter_test.go
index 8faf5fa37a..eabc28067c 100644
--- a/tests/integration/index/query_with_index_combined_filter_test.go
+++ b/tests/integration/index/query_with_index_combined_filter_test.go
@@ -46,7 +46,7 @@ func TestQueryWithIndex_IfIndexFilterWithRegular_ShouldFilter(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(3).WithFieldFetches(6).WithIndexFetches(3),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(6).WithIndexFetches(3),
},
},
}
@@ -86,7 +86,44 @@ func TestQueryWithIndex_IfMultipleIndexFiltersWithRegular_ShouldFilter(t *testin
},
testUtils.Request{
Request: makeExplainQuery(req),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(6).WithFieldFetches(18),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(18),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithIndex_FilterOnNonIndexedField_ShouldIgnoreIndex(t *testing.T) {
+ req := `query {
+ User(filter: {
+ age: {_eq: 44}
+ }) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+ Description: "If filter does not contain indexed field, index should be ignored",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User {
+ name: String @index
+ age: Int
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req,
+ Results: []map[string]any{
+ {"name": "Roy"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithIndexFetches(0),
},
},
}
diff --git a/tests/integration/index/query_with_index_only_filter_test.go b/tests/integration/index/query_with_index_only_filter_test.go
index 82779c5832..a7bab00aaf 100644
--- a/tests/integration/index/query_with_index_only_filter_test.go
+++ b/tests/integration/index/query_with_index_only_filter_test.go
@@ -45,7 +45,7 @@ func TestQueryWithIndex_WithNonIndexedFields_ShouldFetchAllOfThem(t *testing.T)
},
testUtils.Request{
Request: makeExplainQuery(req),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(1).WithFieldFetches(2).WithIndexFetches(1),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(1),
},
},
}
@@ -79,7 +79,7 @@ func TestQueryWithIndex_WithEqualFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(1).WithFieldFetches(1).WithIndexFetches(1),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(1).WithIndexFetches(1),
},
},
}
@@ -122,7 +122,7 @@ func TestQueryWithIndex_IfSeveralDocsWithEqFilter_ShouldFetchAll(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(2),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(4).WithIndexFetches(2),
},
},
}
@@ -157,7 +157,7 @@ func TestQueryWithIndex_WithGreaterThanFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(1).WithFieldFetches(2).WithIndexFetches(10),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(10),
},
},
}
@@ -193,7 +193,7 @@ func TestQueryWithIndex_WithGreaterOrEqualFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(10),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(4).WithIndexFetches(10),
},
},
}
@@ -228,7 +228,7 @@ func TestQueryWithIndex_WithLessThanFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(1).WithFieldFetches(2).WithIndexFetches(10),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(10),
},
},
}
@@ -264,7 +264,7 @@ func TestQueryWithIndex_WithLessOrEqualFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(10),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(4).WithIndexFetches(10),
},
},
}
@@ -307,7 +307,7 @@ func TestQueryWithIndex_WithNotEqualFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(9).WithFieldFetches(9).WithIndexFetches(10),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(9).WithIndexFetches(10),
},
},
}
@@ -343,7 +343,7 @@ func TestQueryWithIndex_WithInFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(2),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(4).WithIndexFetches(2),
},
},
}
@@ -386,7 +386,7 @@ func TestQueryWithIndex_IfSeveralDocsWithInFilter_ShouldFetchAll(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(2),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(4).WithIndexFetches(2),
},
},
}
@@ -424,7 +424,7 @@ func TestQueryWithIndex_WithNotInFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(4).WithFieldFetches(8).WithIndexFetches(10),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(8).WithIndexFetches(10),
},
},
}
@@ -485,7 +485,7 @@ func TestQueryWithIndex_WithLikeFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req1),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(10),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(4).WithIndexFetches(10),
},
testUtils.Request{
Request: req2,
@@ -496,7 +496,7 @@ func TestQueryWithIndex_WithLikeFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req2),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(10),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(4).WithIndexFetches(10),
},
testUtils.Request{
Request: req3,
@@ -507,7 +507,7 @@ func TestQueryWithIndex_WithLikeFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req3),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(10),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(4).WithIndexFetches(10),
},
testUtils.Request{
Request: req4,
@@ -517,7 +517,7 @@ func TestQueryWithIndex_WithLikeFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req4),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(1).WithFieldFetches(2).WithIndexFetches(10),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(10),
},
testUtils.Request{
Request: req5,
@@ -528,7 +528,7 @@ func TestQueryWithIndex_WithLikeFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req5),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(10),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(4).WithIndexFetches(10),
},
testUtils.Request{
Request: req6,
@@ -536,7 +536,7 @@ func TestQueryWithIndex_WithLikeFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req6),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(0).WithFieldFetches(0).WithIndexFetches(10),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(0).WithIndexFetches(10),
},
},
}
@@ -577,7 +577,7 @@ func TestQueryWithIndex_WithNotLikeFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(7).WithFieldFetches(7).WithIndexFetches(10),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(7).WithIndexFetches(10),
},
},
}
diff --git a/tests/integration/index/query_with_relation_filter_test.go b/tests/integration/index/query_with_relation_filter_test.go
index 57a43bf69e..d8fb14e6d4 100644
--- a/tests/integration/index/query_with_relation_filter_test.go
+++ b/tests/integration/index/query_with_relation_filter_test.go
@@ -60,7 +60,7 @@ func TestQueryWithIndexOnOneToManyRelation_IfFilterOnIndexedRelation_ShouldFilte
},
testUtils.Request{
Request: makeExplainQuery(req1),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(6).WithFieldFetches(9).WithIndexFetches(3),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(9).WithIndexFetches(3),
},
testUtils.Request{
Request: req2,
@@ -70,7 +70,7 @@ func TestQueryWithIndexOnOneToManyRelation_IfFilterOnIndexedRelation_ShouldFilte
},
testUtils.Request{
Request: makeExplainQuery(req2),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(3).WithIndexFetches(1),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(3).WithIndexFetches(1),
},
},
}
@@ -122,7 +122,7 @@ func TestQueryWithIndexOnOneToManyRelation_IfFilterOnIndexedRelation_ShouldFilte
},
testUtils.Request{
Request: makeExplainQuery(req1),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(6).WithFieldFetches(9).WithIndexFetches(3),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(9).WithIndexFetches(3),
},
testUtils.Request{
Request: req2,
@@ -132,7 +132,7 @@ func TestQueryWithIndexOnOneToManyRelation_IfFilterOnIndexedRelation_ShouldFilte
},
testUtils.Request{
Request: makeExplainQuery(req2),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(3).WithIndexFetches(1),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(3).WithIndexFetches(1),
},
},
}
@@ -182,7 +182,7 @@ func TestQueryWithIndexOnOneToOnesSecondaryRelation_IfFilterOnIndexedRelation_Sh
},
testUtils.Request{
Request: makeExplainQuery(req1),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(3).WithIndexFetches(1),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(3).WithIndexFetches(1),
},
testUtils.Request{
Request: req2,
@@ -194,7 +194,7 @@ func TestQueryWithIndexOnOneToOnesSecondaryRelation_IfFilterOnIndexedRelation_Sh
},
testUtils.Request{
Request: makeExplainQuery(req2),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(6).WithFieldFetches(9).WithIndexFetches(3),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(9).WithIndexFetches(3),
},
},
}
@@ -245,7 +245,7 @@ func TestQueryWithIndexOnOneToOnePrimaryRelation_IfFilterOnIndexedFieldOfRelatio
},
testUtils.Request{
Request: makeExplainQuery(req1),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(11).WithFieldFetches(12).WithIndexFetches(1),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(12).WithIndexFetches(1),
},
testUtils.Request{
Request: req2,
@@ -257,7 +257,7 @@ func TestQueryWithIndexOnOneToOnePrimaryRelation_IfFilterOnIndexedFieldOfRelatio
},
testUtils.Request{
Request: makeExplainQuery(req2),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(15).WithFieldFetches(18).WithIndexFetches(3),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(18).WithIndexFetches(3),
},
},
}
@@ -301,7 +301,7 @@ func TestQueryWithIndexOnOneToOnePrimaryRelation_IfFilterOnIndexedRelationWhileI
},
testUtils.Request{
Request: makeExplainQuery(req),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(11).WithFieldFetches(12).WithIndexFetches(1),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(12).WithIndexFetches(1),
},
},
}
@@ -368,7 +368,7 @@ func TestQueryWithIndexOnOneToTwoRelation_IfFilterOnIndexedRelation_ShouldFilter
},
testUtils.Request{
Request: makeExplainQuery(req1),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(3).WithIndexFetches(1),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(3).WithIndexFetches(1),
},
testUtils.Request{
Request: req2,
@@ -383,7 +383,7 @@ func TestQueryWithIndexOnOneToTwoRelation_IfFilterOnIndexedRelation_ShouldFilter
},
testUtils.Request{
Request: makeExplainQuery(req2),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(3).WithIndexFetches(1),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(3).WithIndexFetches(1),
},
},
}
diff --git a/tests/integration/index/query_with_unique_composite_index_filter_test.go b/tests/integration/index/query_with_unique_composite_index_filter_test.go
new file mode 100644
index 0000000000..17e1ac76ea
--- /dev/null
+++ b/tests/integration/index/query_with_unique_composite_index_filter_test.go
@@ -0,0 +1,937 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package index
+
+import (
+ "testing"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestQueryWithUniqueCompositeIndex_WithEqualFilter_ShouldFetch(t *testing.T) {
+ req1 := `query {
+ User(filter: {name: {_eq: "Islam"}}) {
+ name
+ age
+ }
+ }`
+ req2 := `query {
+ User(filter: {name: {_eq: "Islam"}, age: {_eq: 32}}) {
+ name
+ age
+ }
+ }`
+ req3 := `query {
+ User(filter: {name: {_eq: "Islam"}, age: {_eq: 66}}) {
+ name
+ age
+ }
+ }`
+ test := testUtils.TestCase{
+ Description: "Test filtering on composite index with _eq filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(unique: true, fields: ["name", "age"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "Islam",
+ "age": 40
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "Islam",
+ "age": 50
+ }`,
+ },
+ testUtils.Request{
+ Request: req1,
+ Results: []map[string]any{
+ {"name": "Islam", "age": 32},
+ {"name": "Islam", "age": 40},
+ {"name": "Islam", "age": 50},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req1),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(3).WithIndexFetches(3),
+ },
+ testUtils.Request{
+ Request: req2,
+ Results: []map[string]any{
+ {"name": "Islam", "age": 32},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req2),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(1).WithIndexFetches(1),
+ },
+ testUtils.Request{
+ Request: req3,
+ Results: []map[string]any{},
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithUniqueCompositeIndex_WithGreaterThanFilterOnFirstField_ShouldFetch(t *testing.T) {
+ req := `query {
+ User(filter: {name: {_ne: "Keenan"}, age: {_gt: 44}}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _gt filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(unique: true, fields: ["age", "name"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req,
+ Results: []map[string]any{
+ {"name": "Chris"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(1).WithIndexFetches(10),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithUniqueCompositeIndex_WithGreaterThanFilterOnSecondField_ShouldFetch(t *testing.T) {
+ req := `query {
+ User(filter: {name: {_ne: "Keenan"}, age: {_gt: 44}}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _gt filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(unique: true, fields: ["name", "age"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req,
+ Results: []map[string]any{
+ {"name": "Chris"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(1).WithIndexFetches(10),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithUniqueCompositeIndex_WithGreaterOrEqualFilterOnFirstField_ShouldFetch(t *testing.T) {
+ req := `query {
+		User(filter: {name: {_ne: "Keenan"}, age: {_ge: 44}}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _ge filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(unique: true, fields: ["age", "name"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req,
+ Results: []map[string]any{
+ {"name": "Roy"},
+ {"name": "Chris"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(10),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithUniqueCompositeIndex_WithGreaterOrEqualFilterOnSecondField_ShouldFetch(t *testing.T) {
+ req := `query {
+ User(filter: {age: {_ge: 44}, name: {_ne: "Keenan"}}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _ge filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(unique: true, fields: ["name", "age"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req,
+ Results: []map[string]any{
+ {"name": "Roy"},
+ {"name": "Chris"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(10),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithUniqueCompositeIndex_WithLessThanFilterOnFirstField_ShouldFetch(t *testing.T) {
+ req := `query {
+ User(filter: {age: {_lt: 24}, name: {_ne: "Shahzad"}}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _lt filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(unique: true, fields: ["age", "name"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req,
+ Results: []map[string]any{
+ {"name": "Bruno"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(1).WithIndexFetches(10),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithUniqueCompositeIndex_WithLessThanFilterOnSecondField_ShouldFetch(t *testing.T) {
+ req := `query {
+ User(filter: {age: {_lt: 24}, name: {_ne: "Shahzad"}}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _lt filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(unique: true, fields: ["name", "age"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req,
+ Results: []map[string]any{
+ {"name": "Bruno"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(1).WithIndexFetches(10),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithUniqueCompositeIndex_WithLessOrEqualFilterOnFirstField_ShouldFetch(t *testing.T) {
+ req := `query {
+ User(filter: {age: {_le: 28}, name: {_ne: "Bruno"}}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _le filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(unique: true, fields: ["age", "name"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req,
+ Results: []map[string]any{
+ {"name": "Shahzad"},
+ {"name": "Fred"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(10),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithUniqueCompositeIndex_WithLessOrEqualFilterOnSecondField_ShouldFetch(t *testing.T) {
+ req := `query {
+ User(filter: {age: {_le: 28}, name: {_ne: "Bruno"}}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _le filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(unique: true, fields: ["name", "age"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req,
+ Results: []map[string]any{
+ {"name": "Fred"},
+ {"name": "Shahzad"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(10),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithUniqueCompositeIndex_WithNotEqualFilter_ShouldFetch(t *testing.T) {
+ req := `query {
+ User(filter: {name: {_ne: "Islam"}, age: {_ne: 28}}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _ne filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(unique: true, fields: ["name", "age"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req,
+ Results: []map[string]any{
+ {"name": "Roy"},
+ {"name": "Addo"},
+ {"name": "Andy"},
+ {"name": "John"},
+ {"name": "Bruno"},
+ {"name": "Chris"},
+ {"name": "Keenan"},
+ {"name": "Shahzad"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(8).WithIndexFetches(10),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithUniqueCompositeIndex_WithInForFirstAndEqForRest_ShouldFetchEfficiently(t *testing.T) {
+ req := `query {
+ User(filter: {age: {_eq: 33}, name: {_in: ["Addo", "Andy", "Fred"]}}) {
+ name
+ age
+ }
+ }`
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _in filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(unique: true, fields: ["name", "age"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "Addo",
+ "age": 33
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "Addo",
+ "age": 88
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "Andy",
+ "age": 33
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "Andy",
+ "age": 70
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "Andy",
+ "age": 51
+ }`,
+ },
+ testUtils.Request{
+ Request: req,
+ Results: []map[string]any{
+ {"name": "Addo", "age": 33},
+ {"name": "Andy", "age": 33},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(2),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithUniqueCompositeIndex_WithInFilter_ShouldFetch(t *testing.T) {
+ req := `query {
+ User(filter: {age: {_in: [20, 28, 33]}, name: {_in: ["Addo", "Andy", "Fred"]}}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _in filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(unique: true, fields: ["name", "age"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "Addo",
+ "age": 10
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "Addo",
+ "age": 88
+ }`,
+ },
+ testUtils.Request{
+ Request: req,
+ Results: []map[string]any{
+ {"name": "Andy"},
+ {"name": "Fred"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(5),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithUniqueCompositeIndex_WithNotInFilter_ShouldFetch(t *testing.T) {
+ req := `query {
+ User(filter: {age: {_nin: [20, 23, 28, 42]}, name: {_nin: ["John", "Andy", "Chris"]}}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _nin filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(unique: true, fields: ["name", "age"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req,
+ Results: []map[string]any{
+ {"name": "Roy"},
+ {"name": "Islam"},
+ {"name": "Keenan"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(3).WithIndexFetches(10),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithUniqueCompositeIndex_WithLikeFilter_ShouldFetch(t *testing.T) {
+ req1 := `query {
+ User(filter: {email: {_like: "a%"}, name: {_like: "%o"}}) {
+ name
+ }
+ }`
+ req2 := `query {
+ User(filter: {email: {_like: "%d@gmail.com"}, name: {_like: "F%"}}) {
+ name
+ }
+ }`
+ req3 := `query {
+ User(filter: {email: {_like: "%e%"}, name: {_like: "%n%"}}) {
+ name
+ }
+ }`
+ req4 := `query {
+ User(filter: {email: {_like: "fred@gmail.com"}, name: {_like: "Fred"}}) {
+ name
+ }
+ }`
+ req5 := `query {
+ User(filter: {email: {_like: "a%@gmail.com"}, name: {_like: "%dd%"}}) {
+ name
+ }
+ }`
+ req6 := `query {
+ User(filter: {email: {_like: "a%com%m"}}) {
+ name
+ }
+ }`
+ req7 := `query {
+ User(filter: {email: {_like: "s%"}, name: {_like: "s%h%d"}}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _like filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(unique: true, fields: ["name", "email"]) {
+ name: String
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req1,
+ Results: []map[string]any{
+ {"name": "Addo"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req1),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(1).WithIndexFetches(10),
+ },
+ testUtils.Request{
+ Request: req2,
+ Results: []map[string]any{
+ {"name": "Fred"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req2),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(1).WithIndexFetches(10),
+ },
+ testUtils.Request{
+ Request: req3,
+ Results: []map[string]any{
+ {"name": "Keenan"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req3),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(1).WithIndexFetches(10),
+ },
+ testUtils.Request{
+ Request: req4,
+ Results: []map[string]any{
+ {"name": "Fred"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req4),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(1).WithIndexFetches(10),
+ },
+ testUtils.Request{
+ Request: req5,
+ Results: []map[string]any{
+ {"name": "Addo"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req5),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(1).WithIndexFetches(10),
+ },
+ testUtils.Request{
+ Request: req6,
+ Results: []map[string]any{},
+ },
+ testUtils.Request{
+ Request: req7,
+ Results: []map[string]any{},
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithUniqueCompositeIndex_WithNotLikeFilter_ShouldFetch(t *testing.T) {
+ req := `query {
+ User(filter: {name: {_nlike: "%h%"}, email: {_nlike: "%d%"}}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _nlike filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(unique: true, fields: ["name", "email"]) {
+ name: String
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: req,
+ Results: []map[string]any{
+ {"name": "Roy"},
+ {"name": "Bruno"},
+ {"name": "Islam"},
+ {"name": "Keenan"},
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(4).WithIndexFetches(10),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithUniqueCompositeIndex_IfFirstFieldIsNotInFilter_ShouldNotUseIndex(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Test if index is not used when first field is not in filter",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(unique: true, fields: ["name", "age"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreatePredefinedDocs{
+ Docs: getUserDocs(),
+ },
+ testUtils.Request{
+ Request: `query @explain(type: execute) {
+ User(filter: {age: {_eq: 32}}) {
+ name
+ }
+ }`,
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(11).WithIndexFetches(0),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithUniqueCompositeIndex_WithEqualFilterOnNilValueOnFirst_ShouldFetch(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _eq filter on nil value on first field",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(unique: true, fields: ["name", "age"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "Alice",
+ "age": 22
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "age": 32
+ }`,
+ },
+ testUtils.Request{
+ Request: `
+ query {
+ User(filter: {name: {_eq: null}}) {
+ name
+ age
+ }
+ }`,
+ Results: []map[string]any{
+ {"name": nil, "age": 32},
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithUniqueCompositeIndex_WithEqualFilterOnNilValueOnSecond_ShouldFetch(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Test index filtering with _eq filter on nil value on second field",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(unique: true, fields: ["name", "age"]) {
+ name: String
+ age: Int
+ email: String
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "Alice",
+ "age": 22
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "Bob"
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "Alice"
+ }`,
+ },
+ testUtils.Request{
+ Request: `
+ query {
+ User(filter: {name: {_eq: "Alice"}, age: {_eq: null}}) {
+ name
+ age
+ }
+ }`,
+ Results: []map[string]any{
+ {
+ "name": "Alice",
+ "age": nil,
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestQueryWithUniqueCompositeIndex_IfMiddleFieldIsNotInFilter_ShouldIgnoreValue(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Test composite index with filter without middle field",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(unique: true, fields: ["name", "email", "age"]) {
+ name: String
+ email: String
+ age: Int
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "Alice",
+ "email": "alice@gmail.com",
+ "age": 22
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "Alan",
+ "email": "alan@gmail.com",
+ "age": 38
+ }`,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `
+ {
+ "name": "Bob",
+ "email": "bob@gmail.com",
+ "age": 51
+ }`,
+ },
+ testUtils.Request{
+ Request: `
+ query {
+ User(filter: {name: {_like: "%l%"}, age: {_gt: 30}}) {
+ name
+ }
+ }`,
+ Results: []map[string]any{
+ {
+ "name": "Alan",
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/index/query_with_unique_index_only_filter_test.go b/tests/integration/index/query_with_unique_index_only_filter_test.go
index ad453409d4..cb9b23ebec 100644
--- a/tests/integration/index/query_with_unique_index_only_filter_test.go
+++ b/tests/integration/index/query_with_unique_index_only_filter_test.go
@@ -42,7 +42,7 @@ func TestQueryWithUniqueIndex_WithEqualFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(1).WithFieldFetches(1).WithIndexFetches(1),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(1).WithIndexFetches(1),
},
},
}
@@ -77,7 +77,7 @@ func TestQueryWithUniqueIndex_WithGreaterThanFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(1).WithFieldFetches(2).WithIndexFetches(10),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(10),
},
},
}
@@ -113,7 +113,7 @@ func TestQueryWithUniqueIndex_WithGreaterOrEqualFilter_ShouldFetch(t *testing.T)
},
testUtils.Request{
Request: makeExplainQuery(req),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(10),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(4).WithIndexFetches(10),
},
},
}
@@ -148,7 +148,7 @@ func TestQueryWithUniqueIndex_WithLessThanFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(1).WithFieldFetches(2).WithIndexFetches(10),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(10),
},
},
}
@@ -184,7 +184,7 @@ func TestQueryWithUniqueIndex_WithLessOrEqualFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(10),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(4).WithIndexFetches(10),
},
},
}
@@ -227,7 +227,7 @@ func TestQueryWithUniqueIndex_WithNotEqualFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(9).WithFieldFetches(9).WithIndexFetches(10),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(9).WithIndexFetches(10),
},
},
}
@@ -263,7 +263,7 @@ func TestQueryWithUniqueIndex_WithInFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(2),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(4).WithIndexFetches(2),
},
},
}
@@ -301,7 +301,7 @@ func TestQueryWithUniqueIndex_WithNotInFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(4).WithFieldFetches(8).WithIndexFetches(10),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(8).WithIndexFetches(10),
},
},
}
@@ -362,7 +362,7 @@ func TestQueryWithUniqueIndex_WithLikeFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req1),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(10),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(4).WithIndexFetches(10),
},
testUtils.Request{
Request: req2,
@@ -373,7 +373,7 @@ func TestQueryWithUniqueIndex_WithLikeFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req2),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(10),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(4).WithIndexFetches(10),
},
testUtils.Request{
Request: req3,
@@ -384,7 +384,7 @@ func TestQueryWithUniqueIndex_WithLikeFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req3),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(10),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(4).WithIndexFetches(10),
},
testUtils.Request{
Request: req4,
@@ -394,7 +394,7 @@ func TestQueryWithUniqueIndex_WithLikeFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req4),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(1).WithFieldFetches(2).WithIndexFetches(10),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(10),
},
testUtils.Request{
Request: req5,
@@ -405,7 +405,7 @@ func TestQueryWithUniqueIndex_WithLikeFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req5),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(10),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(4).WithIndexFetches(10),
},
testUtils.Request{
Request: req6,
@@ -413,7 +413,7 @@ func TestQueryWithUniqueIndex_WithLikeFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req6),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(0).WithFieldFetches(0).WithIndexFetches(10),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(0).WithIndexFetches(10),
},
},
}
@@ -454,7 +454,7 @@ func TestQueryWithUniqueIndex_WithNotLikeFilter_ShouldFetch(t *testing.T) {
},
testUtils.Request{
Request: makeExplainQuery(req),
- Asserter: testUtils.NewExplainAsserter().WithDocFetches(7).WithFieldFetches(7).WithIndexFetches(10),
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(7).WithIndexFetches(10),
},
},
}
diff --git a/tests/integration/schema/updates/add/field/kind/invalid_test.go b/tests/integration/schema/updates/add/field/kind/invalid_test.go
index 98f026ecc2..b9c6dbbf31 100644
--- a/tests/integration/schema/updates/add/field/kind/invalid_test.go
+++ b/tests/integration/schema/updates/add/field/kind/invalid_test.go
@@ -64,30 +64,6 @@ func TestSchemaUpdatesAddFieldKind9(t *testing.T) {
testUtils.ExecuteTestCase(t, test)
}
-func TestSchemaUpdatesAddFieldKind14(t *testing.T) {
- test := testUtils.TestCase{
- Description: "Test schema update, add field with kind deprecated (14)",
- Actions: []any{
- testUtils.SchemaUpdate{
- Schema: `
- type Users {
- name: String
- }
- `,
- },
- testUtils.SchemaPatch{
- Patch: `
- [
- { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 14} }
- ]
- `,
- ExpectedError: "no type found for given name. Type: 14",
- },
- },
- }
- testUtils.ExecuteTestCase(t, test)
-}
-
func TestSchemaUpdatesAddFieldKind15(t *testing.T) {
test := testUtils.TestCase{
Description: "Test schema update, add field with kind deprecated (15)",
diff --git a/tests/integration/schema/updates/add/field/kind/json_test.go b/tests/integration/schema/updates/add/field/kind/json_test.go
new file mode 100644
index 0000000000..37e2886a58
--- /dev/null
+++ b/tests/integration/schema/updates/add/field/kind/json_test.go
@@ -0,0 +1,137 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package kind
+
+import (
+ "testing"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestSchemaUpdatesAddFieldKindJSON(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Test schema update, add field with kind json (14)",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type Users {
+ name: String
+ }
+ `,
+ },
+ testUtils.SchemaPatch{
+ Patch: `
+ [
+ { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 14} }
+ ]
+ `,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users {
+ name
+ foo
+ }
+ }`,
+ Results: []map[string]any{},
+ },
+ },
+ }
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestSchemaUpdatesAddFieldKindJSONWithCreate(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Test schema update, add field with kind json (14) with create",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type Users {
+ name: String
+ }
+ `,
+ },
+ testUtils.SchemaPatch{
+ Patch: `
+ [
+ { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 14} }
+ ]
+ `,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `{
+ "name": "John",
+ "foo": "{}"
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users {
+ name
+ foo
+ }
+ }`,
+ Results: []map[string]any{
+ {
+ "name": "John",
+ "foo": "{}",
+ },
+ },
+ },
+ },
+ }
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestSchemaUpdatesAddFieldKindJSONSubstitutionWithCreate(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Test schema update, add field with kind json substitution with create",
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type Users {
+ name: String
+ }
+ `,
+ },
+ testUtils.SchemaPatch{
+ Patch: `
+ [
+ { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": "JSON"} }
+ ]
+ `,
+ },
+ testUtils.CreateDoc{
+ CollectionID: 0,
+ Doc: `{
+ "name": "John",
+ "foo": "{}"
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ Users {
+ name
+ foo
+ }
+ }`,
+ Results: []map[string]any{
+ {
+ "name": "John",
+ "foo": "{}",
+ },
+ },
+ },
+ },
+ }
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/test_case.go b/tests/integration/test_case.go
index 435f1cf9b4..2c0e095fe8 100644
--- a/tests/integration/test_case.go
+++ b/tests/integration/test_case.go
@@ -246,6 +246,7 @@ type CreateIndex struct {
// The names of the fields to index. Used only for composite indexes.
FieldsNames []string
// The directions of the 'FieldsNames' to index. Used only for composite indexes.
+ // If not provided all fields will be indexed in ascending order.
Directions []client.IndexDirection
// If Unique is true, the index will be created as a unique index.
diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go
index dc344d49f7..4031e6342a 100644
--- a/tests/integration/utils2.go
+++ b/tests/integration/utils2.go
@@ -344,7 +344,7 @@ func createGenerateDocs(s *state, docs []gen.GeneratedDoc, nodeID immutable.Opti
if err != nil {
s.t.Fatalf("Failed to generate docs %s", err)
}
- createDoc(s, CreateDoc{CollectionID: nameToInd[doc.Col.Description.Name], Doc: docJSON, NodeID: nodeID})
+ createDoc(s, CreateDoc{CollectionID: nameToInd[doc.Col.Description.Name.Value()], Doc: docJSON, NodeID: nodeID})
}
}
@@ -352,7 +352,7 @@ func generateDocs(s *state, action GenerateDocs) {
collections := getNodeCollections(action.NodeID, s.collections)
defs := make([]client.CollectionDefinition, 0, len(collections[0]))
for _, col := range collections[0] {
- if len(action.ForCollections) == 0 || slice.Contains(action.ForCollections, col.Name()) {
+ if len(action.ForCollections) == 0 || slice.Contains(action.ForCollections, col.Name().Value()) {
defs = append(defs, col.Definition())
}
}
@@ -739,7 +739,7 @@ func refreshCollections(
for i, collectionName := range s.collectionNames {
for _, collection := range allCollections {
- if collection.Name() == collectionName {
+ if collection.Name().Value() == collectionName {
s.collections[nodeID][i] = collection
break
}
@@ -1155,7 +1155,7 @@ func createDocViaGQL(
_docID
}
}`,
- collection.Name(),
+ collection.Name().Value(),
input,
)
@@ -1302,7 +1302,7 @@ func updateDocViaGQL(
_docID
}
}`,
- collection.Name(),
+ collection.Name().Value(),
doc.ID().String(),
input,
)
@@ -1339,9 +1339,13 @@ func createIndex(
}
} else if len(action.FieldsNames) > 0 {
for i := range action.FieldsNames {
+ dir := client.Ascending
+ if len(action.Directions) > i {
+ dir = action.Directions[i]
+ }
indexDesc.Fields = append(indexDesc.Fields, client.IndexedFieldDescription{
Name: action.FieldsNames[i],
- Direction: action.Directions[i],
+ Direction: dir,
})
}
}
@@ -1883,7 +1887,7 @@ func ParseSDL(gqlSDL string) (map[string]client.CollectionDefinition, error) {
}
result := make(map[string]client.CollectionDefinition)
for _, col := range cols {
- result[col.Description.Name] = col
+ result[col.Description.Name.Value()] = col
}
return result, nil
}
diff --git a/tests/predefined/gen_predefined.go b/tests/predefined/gen_predefined.go
index 76e143c896..0a203fe2ed 100644
--- a/tests/predefined/gen_predefined.go
+++ b/tests/predefined/gen_predefined.go
@@ -31,7 +31,7 @@ func parseSDL(gqlSDL string) (map[string]client.CollectionDefinition, error) {
}
result := make(map[string]client.CollectionDefinition)
for _, col := range cols {
- result[col.Description.Name] = col
+ result[col.Description.Name.Value()] = col
}
return result, nil
}
@@ -85,7 +85,7 @@ func Create(defs []client.CollectionDefinition, docsList DocsList) ([]gen.Genera
resultDocs := make([]gen.GeneratedDoc, 0, len(docsList.Docs))
typeDefs := make(map[string]client.CollectionDefinition)
for _, col := range defs {
- typeDefs[col.Description.Name] = col
+ typeDefs[col.Description.Name.Value()] = col
}
generator := docGenerator{types: typeDefs}
for _, doc := range docsList.Docs {
@@ -151,7 +151,7 @@ func (this *docGenerator) generatePrimary(
result = append(result, subResult...)
secondaryDocs, err := this.generateSecondaryDocs(
- secDocMapField.(map[string]any), docID, &primType, secType.Description.Name)
+ secDocMapField.(map[string]any), docID, &primType, secType.Description.Name.Value())
if err != nil {
return nil, nil, err
}
@@ -202,7 +202,7 @@ func (this *docGenerator) generateSecondaryDocs(
if !field.IsPrimaryRelation() &&
(parentTypeName == "" || parentTypeName != field.Schema) {
docs, err := this.generateSecondaryDocsForField(
- primaryDocMap, primaryType.Description.Name, &field, docID)
+ primaryDocMap, primaryType.Description.Name.Value(), &field, docID)
if err != nil {
return nil, err
}
@@ -231,7 +231,7 @@ func (this *docGenerator) generateSecondaryDocsForField(
case []map[string]any:
for _, relDoc := range relVal {
relDoc[primaryPropName] = primaryDocID
- actions, err := this.generateRelatedDocs(relDoc, relTypeDef.Description.Name)
+ actions, err := this.generateRelatedDocs(relDoc, relTypeDef.Description.Name.Value())
if err != nil {
return nil, err
}
@@ -239,7 +239,7 @@ func (this *docGenerator) generateSecondaryDocsForField(
}
case map[string]any:
relVal[primaryPropName] = primaryDocID
- actions, err := this.generateRelatedDocs(relVal, relTypeDef.Description.Name)
+ actions, err := this.generateRelatedDocs(relVal, relTypeDef.Description.Name.Value())
if err != nil {
return nil, err
}
diff --git a/tools/cloud/akash/deploy.yaml b/tools/cloud/akash/deploy.yaml
index c6f7070f98..48d86fff8b 100644
--- a/tools/cloud/akash/deploy.yaml
+++ b/tools/cloud/akash/deploy.yaml
@@ -5,7 +5,7 @@ services:
defradb:
image: sourcenetwork/defradb:v0.6.0
args:
- - start
+ - start
- --url=0.0.0.0:9181
expose:
- port: 9161
@@ -40,7 +40,7 @@ profiles:
- "akash1365yvmc4s7awdyj3n2sav7xfx76adc6dnmlx63"
- "akash18qa2a2ltfyvkyj0ggj3hkvuj6twzyumuaru9s4"
pricing:
- defradb:
+ defradb:
denom: uakt
amount: 10000
@@ -48,4 +48,4 @@ deployment:
defradb:
akash:
profile: defradb
- count: 1
\ No newline at end of file
+ count: 1
diff --git a/tools/cloud/aws/packer/build_aws_ami.pkr.hcl b/tools/cloud/aws/packer/build_aws_ami.pkr.hcl
index 8afacfb339..4eb1579778 100644
--- a/tools/cloud/aws/packer/build_aws_ami.pkr.hcl
+++ b/tools/cloud/aws/packer/build_aws_ami.pkr.hcl
@@ -66,8 +66,8 @@ build {
inline = [
"/usr/bin/cloud-init status --wait",
"sudo apt-get update && sudo apt-get install make build-essential -y",
- "curl -OL https://golang.org/dl/go1.20.6.linux-amd64.tar.gz",
- "rm -rf /usr/local/go && sudo tar -C /usr/local -xzf go1.20.6.linux-amd64.tar.gz",
+ "curl -OL https://golang.org/dl/go1.21.6.linux-amd64.tar.gz",
+ "rm -rf /usr/local/go && sudo tar -C /usr/local -xzf go1.21.6.linux-amd64.tar.gz",
"export PATH=$PATH:/usr/local/go/bin",
"git clone \"https://git@$DEFRADB_GIT_REPO\"",
"cd ./defradb || { printf \"\\\ncd into defradb failed.\\\n\" && exit 2; }",
diff --git a/tools/configs/golangci.yaml b/tools/configs/golangci.yaml
index c9d69b641e..561cfd7138 100644
--- a/tools/configs/golangci.yaml
+++ b/tools/configs/golangci.yaml
@@ -57,7 +57,7 @@ run:
# Define the Go version limit.
# Default: use Go version from the go.mod file, fallback on the env var `GOVERSION`.
- go: "1.20"
+ go: "1.21"
#=====================================================================================[ Output Configuration Options ]
output:
@@ -263,7 +263,7 @@ linters-settings:
gosimple:
# Select the Go version to target.
- go: "1.20"
+ go: "1.21"
# https://staticcheck.io/docs/options#checks
checks: ["all", "-S1038"]
# Turn on all except (these are disabled):
@@ -355,13 +355,13 @@ linters-settings:
staticcheck:
# Select the Go version to target.
- go: "1.20"
+ go: "1.21"
# https://staticcheck.io/docs/options#checks
checks: ["all"]
unused:
# Select the Go version to target.
- go: "1.20"
+ go: "1.21"
whitespace:
# Enforces newlines (or comments) after every multi-line if statement.
diff --git a/tools/configs/yamllint.yaml b/tools/configs/yamllint.yaml
new file mode 100644
index 0000000000..b7f98b844a
--- /dev/null
+++ b/tools/configs/yamllint.yaml
@@ -0,0 +1,39 @@
+# This file contains our linter configurations that will be used for all Source Inc. projects.
+
+yaml-files:
+ - '*.yaml'
+ - '*.yml'
+ - '.yamllint'
+
+rules:
+ # Disabled lint rules
+ comments: disable
+ comments-indentation: disable
+ document-end: disable
+ document-start: disable
+ empty-values: disable
+ float-values: disable
+ key-ordering: disable
+ octal-values: disable
+ quoted-strings: disable
+
+ # Enabled lint rules
+ anchors: enable
+ braces: enable
+ brackets: enable
+ colons: enable
+ commas: enable
+ empty-lines: enable
+ hyphens: enable
+ key-duplicates: enable
+ new-line-at-end-of-file: enable
+ new-lines: enable
+ trailing-spaces: enable
+ truthy: disable
+ line-length:
+ max: 120
+ level: error
+ indentation:
+ indent-sequences: consistent
+ spaces: consistent
+ check-multi-line-strings: false
diff --git a/tools/defradb.containerfile b/tools/defradb.containerfile
index 272a7e67ba..53a849b6c6 100644
--- a/tools/defradb.containerfile
+++ b/tools/defradb.containerfile
@@ -11,7 +11,7 @@ RUN npm run build
# Stage: BUILD
# Several steps are involved to enable caching and because of the behavior of COPY regarding directories.
-FROM docker.io/golang:1.20 AS BUILD
+FROM docker.io/golang:1.21 AS BUILD
WORKDIR /repo/
COPY go.mod go.sum Makefile ./
RUN make deps:modules